def postgresql_basebackup(job_data):
    """Create a PostgreSQL hot ("basebackup") archive for every source of a job.

    Args:
        job_data (dict): job description; required keys are 'job', 'type',
            'tmp_dir', 'sources' and 'storages'.

    Returns:
        1 when a required top-level key is missing, otherwise None.
    """
    # Resolve the job name defensively first, so the KeyError log below cannot
    # hit an unbound name when 'job' itself is the missing key.
    job_name = job_data.get('job', 'unknown')
    try:
        job_name = job_data['job']
        backup_type = job_data['type']
        tmp_dir = job_data['tmp_dir']
        sources = job_data['sources']
        storages = job_data['storages']
    except KeyError as e:
        log_and_mail.writelog('ERROR', "Missing required key:'%s'!" % (e), config.filelog_fd, job_name)
        return 1

    full_path_tmp_dir = general_function.get_tmp_dir(tmp_dir, backup_type)

    for i in range(len(sources)):
        try:
            connect = sources[i]['connect']
            gzip = sources[i]['gzip']
            extra_keys = sources[i]['extra_keys']
        except KeyError as e:
            log_and_mail.writelog('ERROR', "Missing required key:'%s'!" % (e), config.filelog_fd, job_name)
            continue

        db_host = connect.get('db_host')
        db_port = connect.get('db_port')
        db_user = connect.get('db_user')
        db_password = connect.get('db_password')

        if not (db_user and db_host and db_password):
            log_and_mail.writelog('ERROR', "Can't find the authentication data, please fill in the required fields",
                                  config.filelog_fd, job_name)
            continue

        if not db_port:
            db_port = general_function.get_default_port('postgresql')

        # Probe the connection before invoking pg_basebackup so auth problems
        # are reported per source instead of surfacing from the dump tool.
        try:
            connection = psycopg2.connect(dbname="postgres", user=db_user, password=db_password,
                                          host=db_host, port=db_port)
        except psycopg2.Error as err:
            # BUGFIX: the original passed 5 arguments into a format string
            # with only 4 '%s' placeholders (the user is masked), which
            # raised TypeError instead of logging. The masked user name is
            # dropped from the argument tuple. Also fixed "with with" typo.
            log_and_mail.writelog('ERROR',
                                  "Can't connect to PostgreSQL instances with following data host='%s', port='%s', user='******', passwd='%s':%s" % (db_host, db_port, db_password, err),
                                  config.filelog_fd, job_name)
            continue
        else:
            connection.close()

        # NOTE(review): 'postgresq_hot' (sic) is part of the produced backup
        # file name; kept unchanged so existing retention keeps matching.
        backup_full_tmp_path = general_function.get_full_path(
            full_path_tmp_dir, 'postgresq_hot', 'tar', gzip)

        periodic_backup.remove_old_local_file(storages, '', job_name)

        str_auth = ' --dbname=postgresql://%s:%s@%s:%s/ ' % (db_user, db_password, db_host, db_port)

        if is_success_pgbasebackup(extra_keys, str_auth, backup_full_tmp_path, gzip, job_name):
            periodic_backup.general_desc_iteration(backup_full_tmp_path, storages, '', job_name)

    # After all the manipulations, delete the created temporary directory and
    # data inside the directory with cache davfs, but not the directory itself!
    general_function.del_file_objects(backup_type, full_path_tmp_dir, '/var/cache/davfs2/*')
def postgresql_basebackup(job_data):
    """Create a PostgreSQL hot ("basebackup") archive for every source of a job.

    Args:
        job_data (dict): job description parsed by
            general_function.get_job_parameters().

    Returns:
        None. Returns early when the job parameters cannot be read.
    """
    is_prams_read, job_name, backup_type, tmp_dir, sources, storages, safety_backup, deferred_copying_level = \
        general_function.get_job_parameters(job_data)
    if not is_prams_read:
        return

    full_path_tmp_dir = general_function.get_tmp_dir(tmp_dir, backup_type)
    for i in range(len(sources)):
        try:
            connect = sources[i]['connect']
            gzip = sources[i]['gzip']
            extra_keys = sources[i]['extra_keys']
        except KeyError as e:
            log_and_mail.writelog('ERROR', f"Missing required key:'{e}'!", config.filelog_fd, job_name)
            continue

        db_host = connect.get('db_host')
        db_port = connect.get('db_port')
        db_user = connect.get('db_user')
        db_password = connect.get('db_password')

        # NOTE(review): this accepts a source when ANY of user/host/password
        # is set (the older variant required all three) — presumably to allow
        # peer/trust auth; confirm against get_connection conventions.
        if not (db_user or db_host or db_password):
            log_and_mail.writelog(
                'ERROR', "Can't find the authentication data, please fill in the required fields",
                config.filelog_fd, job_name)
            continue

        if not db_port:
            db_port = general_function.get_default_port('postgresql')

        # Probe the connection before invoking pg_basebackup so auth problems
        # are reported per source instead of surfacing from the dump tool.
        try:
            connection = psycopg2.connect(dbname="postgres", user=db_user, password=db_password,
                                          host=db_host, port=db_port)
        except psycopg2.Error as err:
            # BUGFIX: removed the duplicated word in the log message
            # ("with with following data" -> "with following data").
            log_and_mail.writelog(
                'ERROR', f"Can't connect to PostgreSQL instances with following data host='{db_host}', "
                         f"port='{db_port}', user='******', passwd='{db_password}':{err}",
                config.filelog_fd, job_name)
            continue
        else:
            connection.close()

        # NOTE(review): 'postgresq_hot' (sic) is part of the produced backup
        # file name; kept unchanged so existing retention keeps matching.
        backup_full_tmp_path = general_function.get_full_path(
            full_path_tmp_dir, 'postgresq_hot', 'tar', gzip, i)

        periodic_backup.remove_old_local_file(storages, '', job_name)

        str_auth = f' --dbname=postgresql://{db_user}:{db_password}@{db_host}:{db_port}/ '

        if is_success_pgbasebackup(extra_keys, str_auth, backup_full_tmp_path, gzip, job_name):
            periodic_backup.general_desc_iteration(backup_full_tmp_path, storages, '', job_name,
                                                   safety_backup)

    # After all the manipulations, delete the created temporary directory and
    # data inside the directory with cache davfs, but not the directory itself!
    general_function.del_file_objects(backup_type, full_path_tmp_dir, '/var/cache/davfs2/*')
def mongodb_backup(job_data):
    '''
    Function, creates a mongodb backup.
    At the entrance receives a dictionary with the data of the job.
    '''
    # Resolve the job name defensively first, so the KeyError log below cannot
    # hit an unbound name when 'job' itself is the missing key.
    job_name = job_data.get('job', 'unknown')
    try:
        job_name = job_data['job']
        backup_type = job_data['type']
        tmp_dir = job_data['tmp_dir']
        sources = job_data['sources']
        storages = job_data['storages']
    except KeyError as e:
        log_and_mail.writelog('ERROR', "Missing required key:'%s'!" % (e), config.filelog_fd, job_name)
        return 1

    full_path_tmp_dir = general_function.get_tmp_dir(tmp_dir, backup_type)

    for i in range(len(sources)):
        exclude_dbs_list = sources[i].get('exclude_dbs', [])
        exclude_collections_list = sources[i].get('exclude_collections', [])
        try:
            connect = sources[i]['connect']
            target_db_list = sources[i]['target_dbs']
            target_collection_list = sources[i]['target_collections']
            gzip = sources[i]['gzip']
            extra_keys = sources[i]['extra_keys']
        except KeyError as e:
            log_and_mail.writelog('ERROR', "Missing required key:'%s'!" % (e), config.filelog_fd, job_name)
            continue

        db_host = connect.get('db_host')
        db_port = connect.get('db_port')
        db_user = connect.get('db_user')
        db_password = connect.get('db_password')

        # A host is mandatory; user and password must be given together
        # (the XOR rejects one without the other).
        if not (db_host and not (bool(db_user) ^ bool(db_password))):
            log_and_mail.writelog('ERROR', "Can't find the authentication data, please fill in the required fields",
                                  config.filelog_fd, job_name)
            continue

        if not db_port:
            db_port = general_function.get_default_port('mongodb')

        is_all_flag_db = is_all_flag_collection = False

        if 'all' in target_db_list:
            is_all_flag_db = True
        if 'all' in target_collection_list:
            is_all_flag_collection = True

        if db_user:
            uri = "mongodb://%s:%s@%s:%s/" % (db_user, db_password, db_host, db_port)  # for pymongo
            str_auth = " --host %s --port %s --username %s --password %s " % (db_host, db_port, db_user, db_password)  # for mongodump
        else:
            uri = "mongodb://%s:%s/" % (db_host, db_port)
            str_auth = " --host %s --port %s " % (db_host, db_port)

        client = None
        if is_all_flag_db:
            try:
                client = pymongo.MongoClient(uri)
                # NOTE(review): database_names() was removed in pymongo 4
                # (use list_database_names()); kept for the pinned driver.
                target_db_list = client.database_names()
            except pymongo.errors.PyMongoError as err:
                # BUGFIX: the original passed 5 arguments into a format string
                # with only 4 '%s' placeholders (the user is masked), which
                # raised TypeError instead of logging.
                log_and_mail.writelog('ERROR',
                                      "Can't connect to MongoDB instances with the following data host='%s', port='%s', user='******', passwd='%s':%s" % (db_host, db_port, db_password, err),
                                      config.filelog_fd, job_name)
                continue
            finally:
                # BUGFIX: guard against 'client' being unbound when
                # MongoClient() itself raised.
                if client:
                    client.close()

        for db in target_db_list:
            if db not in exclude_dbs_list:
                try:
                    client = pymongo.MongoClient(uri)
                    current_db = client[db]
                    collection_list = current_db.collection_names()
                except pymongo.errors.PyMongoError as err:
                    # BUGFIX: same placeholder/argument mismatch as above.
                    log_and_mail.writelog('ERROR',
                                          "Can't connect to MongoDB instances with the following data host='%s', port='%s', user='******', passwd='%s':%s" % (db_host, db_port, db_password, err),
                                          config.filelog_fd, job_name)
                    continue
                finally:
                    if client:
                        client.close()

                if is_all_flag_collection:
                    target_collection_list = collection_list

                for collection in target_collection_list:
                    # Only dump collections that exist and are not excluded.
                    if collection not in exclude_collections_list and collection in collection_list:
                        str_auth_finally = "%s --collection %s " % (str_auth, collection)

                        backup_full_tmp_path = general_function.get_full_path(
                            full_path_tmp_dir, collection, 'mongodump', gzip)

                        part_of_dir_path = os.path.join(db, collection)
                        periodic_backup.remove_old_local_file(storages, part_of_dir_path, job_name)

                        if is_success_mongodump(collection, db, extra_keys, str_auth_finally, backup_full_tmp_path, gzip, job_name):
                            periodic_backup.general_desc_iteration(backup_full_tmp_path, storages, part_of_dir_path, job_name)

    # After all the manipulations, delete the created temporary directory and
    # data inside the directory with cache davfs, but not the directory itself!
    general_function.del_file_objects(backup_type, full_path_tmp_dir, '/var/cache/davfs2/*')
def redis_backup(job_data):
    """Create a Redis RDB backup (via BGSAVE) for every source of a job.

    Args:
        job_data (dict): job description parsed by
            general_function.get_job_parameters().

    Returns:
        None. Returns early when the job parameters cannot be read.
    """
    is_prams_read, job_name, backup_type, tmp_dir, sources, storages, safety_backup, deferred_copying_level = \
        general_function.get_job_parameters(job_data)
    if not is_prams_read:
        return

    full_path_tmp_dir = general_function.get_tmp_dir(tmp_dir, backup_type)
    for i in range(len(sources)):
        try:
            connect = sources[i]['connect']
            gzip = sources[i]['gzip']
        except KeyError as e:
            log_and_mail.writelog('ERROR', f"Missing required key:'{e}'!", config.filelog_fd, job_name)
            continue

        db_host = connect.get('db_host')
        db_port = connect.get('db_port')
        db_password = connect.get('db_password')
        socket = connect.get('socket')

        # Either a TCP host or a unix socket must be configured.
        if not (db_host or socket):
            log_and_mail.writelog('ERROR', "Can't find the authentication data, please fill in the required fields",
                                  config.filelog_fd, job_name)
            continue

        if not db_port:
            db_port = general_function.get_default_port('redis')

        # Build the redis-cli auth string; the StrictRedis constructor is
        # invoked for its side checks before shelling out.
        # NOTE(review): StrictRedis connects lazily, so this may not actually
        # validate the connection — confirm intended behavior.
        try:
            if db_host:
                if db_password:
                    redis.StrictRedis(host=db_host, port=db_port, password=db_password)
                    str_auth = f" -h {db_host} -p {db_port} -a '{db_password}' "
                else:
                    redis.StrictRedis(host=db_host, port=db_port)
                    str_auth = f" -h {db_host} -p {db_port} "
            else:
                if db_password:
                    redis.StrictRedis(unix_socket_path=socket, password=db_password)
                    str_auth = f" -s {socket} -a '{db_password}' "
                else:
                    redis.StrictRedis(unix_socket_path=socket)
                    str_auth = f" -s {socket} "
        except (redis.exceptions.ConnectionError, ConnectionRefusedError) as err:
            # BUGFIX: removed the duplicated word in the log message
            # ("with with following data" -> "with following data").
            log_and_mail.writelog('ERROR',
                                  f"Can't connect to Redis instances with following data host='{db_host}', "
                                  f"port='{db_port}', passwd='{db_password}', socket='{socket}': {err}",
                                  config.filelog_fd, job_name)
            continue
        else:
            backup_full_tmp_path = general_function.get_full_path(
                full_path_tmp_dir, 'redis', 'rdb', gzip)

            periodic_backup.remove_old_local_file(storages, '', job_name)

            if is_success_bgsave(str_auth, backup_full_tmp_path, gzip, job_name):
                periodic_backup.general_desc_iteration(backup_full_tmp_path, storages, '', job_name,
                                                       safety_backup)

    # After all the manipulations, delete the created temporary directory and
    # data inside the directory with cache davfs, but not the directory itself!
    general_function.del_file_objects(backup_type, full_path_tmp_dir, '/var/cache/davfs2/*')
def mongodb_backup(job_data):
    """
    Function, creates a mongodb backup.
    At the entrance receives a dictionary with the data of the job.

    Expands the literal 'all' into the real database/collection lists, dumps
    each selected collection via mongodump, and copies results to storages
    either immediately or deferred depending on deferred_copying_level.
    NOTE(review): the exact flush point per level (<=0 per collection, ==1
    per database, ==2 per source, >=3 at the end) is reconstructed from the
    loop placement — confirm against the project's deferred-copy convention.
    """
    is_prams_read, job_name, backup_type, tmp_dir, sources, storages, safety_backup, deferred_copying_level = \
        general_function.get_job_parameters(job_data)
    if not is_prams_read:
        return

    full_path_tmp_dir = general_function.get_tmp_dir(tmp_dir, backup_type)
    # Tracks per-collection dump outcome for the deferred copy passes below.
    # NOTE(review): keyed by collection name only — identically named
    # collections in different databases overwrite each other's entry here.
    dumped_collections = {}
    for i in range(len(sources)):
        exclude_dbs_list = sources[i].get('exclude_dbs', [])
        exclude_collections_list = sources[i].get('exclude_collections', [])
        try:
            connect = sources[i]['connect']
            target_db_list = sources[i]['target_dbs']
            target_collection_list = sources[i]['target_collections']
            gzip = sources[i]['gzip']
            extra_keys = sources[i]['extra_keys']
        except KeyError as e:
            log_and_mail.writelog('ERROR', f"Missing required key:'{e}'!", config.filelog_fd, job_name)
            continue

        db_host = connect.get('db_host')
        db_port = connect.get('db_port')
        db_user = connect.get('db_user')
        db_password = connect.get('db_password')

        # A host is mandatory; user and password must be given together
        # (the XOR rejects one without the other).
        if not (db_host and not (bool(db_user) ^ bool(db_password))):
            log_and_mail.writelog('ERROR', "Can't find the authentication data, please fill in the required fields",
                                  config.filelog_fd, job_name)
            continue

        if not db_port:
            db_port = general_function.get_default_port('mongodb')

        is_all_flag_db = is_all_flag_collection = False

        if 'all' in target_db_list:
            is_all_flag_db = True
        if 'all' in target_collection_list:
            is_all_flag_collection = True

        # Two auth forms: a URI for pymongo and a CLI string for mongodump.
        if db_user:
            uri = f"mongodb://{db_user}:{db_password}@{db_host}:{db_port}/"  # for pymongo
            str_auth = f" --host {db_host} --port {db_port} --username {db_user} --password {db_password} "
        else:
            uri = f"mongodb://{db_host}:{db_port}/"
            str_auth = f" --host {db_host} --port {db_port} "

        client = None
        if is_all_flag_db:
            # 'all' requested: enumerate every database on the instance.
            try:
                client = pymongo.MongoClient(uri)
                target_db_list = client.list_database_names()
            except PyMongoError as err:
                log_and_mail.writelog('ERROR',
                                      f"Can't connect to MongoDB instances with the following data host='{db_host}', "
                                      f"port='{db_port}', user='******', passwd='{db_password}':{err}",
                                      config.filelog_fd, job_name)
                continue
            finally:
                # Guard: client stays None when MongoClient() itself raised.
                if client:
                    client.close()

        for db in target_db_list:
            if db not in exclude_dbs_list:
                try:
                    client = pymongo.MongoClient(uri)
                    current_db = client[db]
                    # NOTE(review): collection_names() was removed in
                    # pymongo 4 (use list_collection_names()) — relies on
                    # the pinned driver version.
                    collection_list = current_db.collection_names()
                except PyMongoError as err:
                    log_and_mail.writelog(
                        'ERROR',
                        f"Can't connect to MongoDB instances with the following data host='{db_host}', "
                        f"port='{db_port}', user='******', passwd='{db_password}':{err}",
                        config.filelog_fd, job_name)
                    continue
                finally:
                    if client:
                        client.close()

                if is_all_flag_collection:
                    target_collection_list = collection_list

                for collection in target_collection_list:
                    # Only dump collections that exist and are not excluded.
                    if collection not in exclude_collections_list and collection in collection_list:
                        str_auth_finally = f"{str_auth} --collection {collection} "

                        # The f'{i}-{db}-' prefix keeps temp file names unique
                        # across sources and databases.
                        backup_full_tmp_path = general_function.get_full_path(
                            full_path_tmp_dir, collection, 'mongodump', gzip, f'{i}-{db}-')

                        part_of_dir_path = os.path.join(db, collection)
                        periodic_backup.remove_old_local_file(storages, part_of_dir_path, job_name)

                        if is_success_mongodump(collection, db, extra_keys, str_auth_finally, backup_full_tmp_path,
                                                gzip, job_name):
                            dumped_collections[collection] = {'success': True,
                                                              'tmp_path': backup_full_tmp_path,
                                                              'part_of_dir_path': part_of_dir_path}
                        else:
                            dumped_collections[collection] = {'success': False}

                        # Level <= 0: copy to storages immediately per collection.
                        if deferred_copying_level <= 0 and dumped_collections[collection]['success']:
                            periodic_backup.general_desc_iteration(backup_full_tmp_path, storages,
                                                                   part_of_dir_path, job_name, safety_backup)

                # Level == 1: copy everything dumped so far after each database.
                for collection, result in dumped_collections.items():
                    if deferred_copying_level == 1 and result['success']:
                        periodic_backup.general_desc_iteration(result['tmp_path'], storages,
                                                               result['part_of_dir_path'], job_name, safety_backup)

        # Level == 2: copy after each source has been fully processed.
        for collection, result in dumped_collections.items():
            if deferred_copying_level == 2 and result['success']:
                periodic_backup.general_desc_iteration(result['tmp_path'], storages,
                                                       result['part_of_dir_path'], job_name, safety_backup)

    # Level >= 3: copy only once, after all sources.
    for collection, result in dumped_collections.items():
        if deferred_copying_level >= 3 and result['success']:
            periodic_backup.general_desc_iteration(result['tmp_path'], storages,
                                                   result['part_of_dir_path'], job_name, safety_backup)

    # After all the manipulations, delete the created temporary directory and
    # data inside the directory with cache davfs, but not the directory itself!
    general_function.del_file_objects(backup_type, full_path_tmp_dir, '/var/cache/davfs2/*')
def mysql_backup(job_data):
    """Create a mysqldump backup for every target database of a job.

    Optionally stops replication ("STOP SLAVE") on a replica for the duration
    of the dump and restarts it afterwards.

    Args:
        job_data (dict): job description; required keys are 'job', 'type',
            'tmp_dir', 'sources' and 'storages'.

    Returns:
        1 when a required top-level key is missing, otherwise None.
    """
    # Resolve the job name defensively first, so the KeyError log below cannot
    # hit an unbound name when 'job' itself is the missing key.
    job_name = job_data.get('job', 'unknown')
    try:
        job_name = job_data['job']
        backup_type = job_data['type']
        tmp_dir = job_data['tmp_dir']
        sources = job_data['sources']
        storages = job_data['storages']
    except KeyError as e:
        log_and_mail.writelog('ERROR', f"Missing required key:'{e}'!", config.filelog_fd, job_name)
        return 1

    full_path_tmp_dir = general_function.get_tmp_dir(tmp_dir, backup_type)

    for i in range(len(sources)):
        exclude_list = sources[i].get('excludes', [])
        try:
            connect = sources[i]['connect']
            target_list = sources[i]['target']
            gzip = sources[i]['gzip']
            is_slave = sources[i]['is_slave']
            extra_keys = sources[i]['extra_keys']
        except KeyError as e:
            log_and_mail.writelog('ERROR', f"Missing required key:'{e}'!", config.filelog_fd, job_name)
            continue

        db_host = connect.get('db_host')
        db_port = connect.get('db_port')
        socket = connect.get('socket')
        db_user = connect.get('db_user')
        db_password = connect.get('db_password')
        auth_file = connect.get('auth_file')

        # Either an auth file, or (host-or-socket + user + password).
        if not (auth_file or ((db_host or socket) and db_user and db_password)):
            log_and_mail.writelog(
                'ERROR', "Can't find the authentication data, please fill in the required fields",
                config.filelog_fd, job_name)
            continue

        if not db_port:
            db_port = general_function.get_default_port('mysql')

        is_all_flag = False

        if 'all' in target_list:
            is_all_flag = True

        # BUGFIX: replaced a bare 'except:' (which also swallowed
        # KeyboardInterrupt/SystemExit) with 'except Exception'.
        # get_connection is assumed to log its own failure reason.
        try:
            (connection_1, str_auth) = get_connection(db_host, db_port, db_user, db_password,
                                                      auth_file, socket, job_name)
        except Exception:
            continue

        cur_1 = connection_1.cursor()

        if is_all_flag:
            cur_1.execute("SHOW DATABASES")
            # 'row' instead of 'i' so the outer source index is not shadowed.
            target_list = [row[0] for row in cur_1.fetchall()]

        if is_slave:
            try:
                cur_1.execute("STOP SLAVE")
            except MySQLdb.Error as err:
                log_and_mail.writelog('ERROR', f"Can't stop slave: {err}", config.filelog_fd, job_name)

        connection_1.close()

        for db in target_list:
            if db not in exclude_list:
                backup_full_tmp_path = general_function.get_full_path(
                    full_path_tmp_dir, db, 'sql', gzip)

                periodic_backup.remove_old_local_file(storages, db, job_name)

                if is_success_mysqldump(db, extra_keys, str_auth, backup_full_tmp_path, gzip, job_name):
                    periodic_backup.general_desc_iteration(
                        backup_full_tmp_path, storages, db, job_name)

        if is_slave:
            # BUGFIX: 'connection_2' could be unbound in the finally block
            # when get_connection itself raised; it is now pre-initialized
            # and only closed when it was actually opened.
            connection_2 = None
            try:
                (connection_2, str_auth) = get_connection(db_host, db_port, db_user, db_password,
                                                          auth_file, socket, job_name)
                cur_2 = connection_2.cursor()
                cur_2.execute("START SLAVE")
            except MySQLdb.Error as err:
                log_and_mail.writelog('ERROR', f"Can't start slave: {err} ", config.filelog_fd, job_name)
            finally:
                if connection_2:
                    connection_2.close()

    # After all the manipulations, delete the created temporary directory and
    # data inside the directory with cache davfs, but not the directory itself!
    general_function.del_file_objects(backup_type, full_path_tmp_dir, '/var/cache/davfs2/*')
def mysql_backup(job_data):
    """Create a mysqldump backup for every target database of a job.

    Optionally stops replication ("STOP SLAVE") for the duration of the dump
    and restarts it afterwards. Copies results to storages immediately or
    deferred depending on deferred_copying_level.
    NOTE(review): flush points (==1 per source, >=2 at the end) reconstructed
    from loop placement — confirm against the project's convention.
    """
    is_prams_read, job_name, backup_type, tmp_dir, sources, storages, safety_backup, deferred_copying_level = \
        general_function.get_job_parameters(job_data)
    if not is_prams_read:
        return

    full_path_tmp_dir = general_function.get_tmp_dir(tmp_dir, backup_type)
    # Per-database dump outcome, consumed by the deferred copy passes below.
    dumped_dbs = {}
    for i in range(len(sources)):
        exclude_list = sources[i].get('excludes', [])
        try:
            connect = sources[i]['connect']
            target_list = sources[i]['target']
            gzip = sources[i]['gzip']
            is_slave = sources[i]['is_slave']
            extra_keys = sources[i]['extra_keys']
        except KeyError as e:
            log_and_mail.writelog('ERROR', f"Missing required key:'{e}'!", config.filelog_fd, job_name)
            continue

        db_host = connect.get('db_host')
        db_port = connect.get('db_port')
        socket = connect.get('socket')
        db_user = connect.get('db_user')
        db_password = connect.get('db_password')
        auth_file = connect.get('auth_file')

        # NOTE(review): this only rejects the source when ALL fields are
        # empty (everything is joined with 'or'); an older variant required
        # host-or-socket AND user AND password — confirm the relaxation is
        # intentional (e.g. socket auth without credentials).
        if not (auth_file or ((db_host or socket) or db_user or db_password)):
            log_and_mail.writelog(
                'ERROR', "Can't find the authentication data, please fill in the required fields",
                config.filelog_fd, job_name)
            continue

        if not db_port:
            db_port = general_function.get_default_port('mysql')

        is_all_flag = False

        if 'all' in target_list:
            is_all_flag = True

        # get_connection returns (None, ...) on failure and is assumed to
        # log its own failure reason.
        connection_1, str_auth = get_connection(db_host, db_port, db_user, db_password,
                                                auth_file, socket, job_name)
        if connection_1 is None:
            continue

        cur_1 = connection_1.cursor()

        if is_all_flag:
            # 'all' requested: enumerate every database on the server.
            cur_1.execute("SHOW DATABASES")
            target_list = [i[0] for i in cur_1.fetchall()]

        if is_slave:
            try:
                cur_1.execute("STOP SLAVE")
            except MySQLdb.Error as err:
                log_and_mail.writelog('ERROR', f"Can't stop slave: {err}", config.filelog_fd, job_name)

        connection_1.close()

        for db in target_list:
            if db not in exclude_list:
                backup_full_tmp_path = general_function.get_full_path(
                    full_path_tmp_dir, db, 'sql', gzip, i)

                periodic_backup.remove_old_local_file(storages, db, job_name)

                if is_success_mysqldump(db, extra_keys, str_auth, backup_full_tmp_path, gzip, job_name):
                    dumped_dbs[db] = {
                        'success': True,
                        'tmp_path': backup_full_tmp_path
                    }
                else:
                    dumped_dbs[db] = {'success': False}

                # Level <= 0: copy to storages immediately per database.
                if deferred_copying_level <= 0 and dumped_dbs[db]['success']:
                    periodic_backup.general_desc_iteration(
                        backup_full_tmp_path, storages, db, job_name, safety_backup)

        if is_slave:
            connection_2, str_auth = get_connection(db_host, db_port, db_user, db_password,
                                                    auth_file, socket, job_name)
            if connection_2 is None:
                # NOTE(review): this early return leaves replication stopped
                # AND skips the temp-dir cleanup at the bottom — confirm
                # whether that is acceptable for this failure mode.
                log_and_mail.writelog(
                    'ERROR', f"Can't start slave: Can't connect to MySQL.",
                    config.filelog_fd, job_name)
                return
            cur_2 = connection_2.cursor()
            try:
                cur_2.execute("START SLAVE")
            except MySQLdb.Error as err:
                log_and_mail.writelog('ERROR', f"Can't start slave: {err} ", config.filelog_fd, job_name)
            connection_2.close()

        # Level == 1: copy everything dumped so far after each source.
        for db, result in dumped_dbs.items():
            if deferred_copying_level == 1 and result['success']:
                periodic_backup.general_desc_iteration(result['tmp_path'], storages, db,
                                                       job_name, safety_backup)

    # Level >= 2: copy only once, after all sources.
    for db, result in dumped_dbs.items():
        if deferred_copying_level >= 2 and result['success']:
            periodic_backup.general_desc_iteration(result['tmp_path'], storages, db,
                                                   job_name, safety_backup)

    # After all the manipulations, delete the created temporary directory and
    # data inside the directory with cache davfs, but not the directory itself!
    general_function.del_file_objects(backup_type, full_path_tmp_dir, '/var/cache/davfs2/*')
def redis_backup(job_data):
    """Create a Redis RDB backup (via BGSAVE) for every source of a job.

    Args:
        job_data (dict): job description; required keys are 'job', 'type',
            'tmp_dir', 'sources' and 'storages'.

    Returns:
        1 when a required top-level key is missing, otherwise None.
    """
    # Resolve the job name defensively first, so the KeyError log below cannot
    # hit an unbound name when 'job' itself is the missing key.
    job_name = job_data.get('job', 'unknown')
    try:
        job_name = job_data['job']
        backup_type = job_data['type']
        tmp_dir = job_data['tmp_dir']
        sources = job_data['sources']
        storages = job_data['storages']
    except KeyError as e:
        log_and_mail.writelog('ERROR', "Missing required key:'%s'!" % (e), config.filelog_fd, job_name)
        return 1

    full_path_tmp_dir = general_function.get_tmp_dir(tmp_dir, backup_type)

    for i in range(len(sources)):
        try:
            connect = sources[i]['connect']
            gzip = sources[i]['gzip']
        except KeyError as e:
            log_and_mail.writelog('ERROR', "Missing required key:'%s'!" % (e), config.filelog_fd, job_name)
            continue

        db_host = connect.get('db_host')
        db_port = connect.get('db_port')
        db_password = connect.get('db_password')
        socket = connect.get('socket')

        # Either a TCP host or a unix socket must be configured.
        if not (db_host or socket):
            log_and_mail.writelog('ERROR', "Can't find the authentication data, please fill in the required fields",
                                  config.filelog_fd, job_name)
            continue

        if not db_port:
            db_port = general_function.get_default_port('redis')

        # Build the redis-cli auth string; the StrictRedis constructor is
        # invoked for its side checks before shelling out.
        # NOTE(review): StrictRedis connects lazily, so this may not actually
        # validate the connection — confirm intended behavior.
        try:
            if db_host:
                if db_password:
                    redis.StrictRedis(host=db_host, port=db_port, password=db_password)
                    str_auth = " -h %s -p %s -a '%s' " % (db_host, db_port, db_password)
                else:
                    redis.StrictRedis(host=db_host, port=db_port)
                    str_auth = " -h %s -p %s " % (db_host, db_port)
            else:
                if db_password:
                    redis.StrictRedis(unix_socket_path=socket, password=db_password)
                    str_auth = " -s %s -a '%s' " % (socket, db_password)
                else:
                    redis.StrictRedis(unix_socket_path=socket)
                    str_auth = " -s %s " % (socket)
        except (redis.exceptions.ConnectionError, ConnectionRefusedError) as err:
            # BUGFIX: removed the duplicated word in the log message
            # ("with with following data" -> "with following data").
            log_and_mail.writelog('ERROR',
                                  "Can't connect to Redis instances with following data host='%s', port='%s', passwd='%s', socket='%s' :%s" % (db_host, db_port, db_password, socket, err),
                                  config.filelog_fd, job_name)
            continue
        else:
            backup_full_tmp_path = general_function.get_full_path(
                full_path_tmp_dir, 'redis', 'rdb', gzip)

            periodic_backup.remove_old_local_file(storages, '', job_name)

            if is_success_bgsave(str_auth, backup_full_tmp_path, gzip, job_name):
                periodic_backup.general_desc_iteration(backup_full_tmp_path, storages, '', job_name)

    # After all the manipulations, delete the created temporary directory and
    # data inside the directory with cache davfs, but not the directory itself!
    general_function.del_file_objects(backup_type, full_path_tmp_dir, '/var/cache/davfs2/*')
def postgresql_backup(job_data):
    """Create a pg_dump backup for every target database of a job.

    Args:
        job_data (dict): job description; required keys are 'job', 'type',
            'tmp_dir', 'sources' and 'storages'.

    Returns:
        1 when a required top-level key is missing, otherwise None.
    """
    # Resolve the job name defensively first, so the KeyError log below cannot
    # hit an unbound name when 'job' itself is the missing key.
    job_name = job_data.get('job', 'unknown')
    try:
        job_name = job_data['job']
        backup_type = job_data['type']
        tmp_dir = job_data['tmp_dir']
        sources = job_data['sources']
        storages = job_data['storages']
    except KeyError as e:
        log_and_mail.writelog('ERROR', f"Missing required key:'{e}'!", config.filelog_fd, job_name)
        return 1

    full_path_tmp_dir = general_function.get_tmp_dir(tmp_dir, backup_type)

    for i in range(len(sources)):
        exclude_list = sources[i].get('excludes', [])
        try:
            connect = sources[i]['connect']
            target_list = sources[i]['target']
            gzip = sources[i]['gzip']
            extra_keys = sources[i]['extra_keys']
        except KeyError as e:
            log_and_mail.writelog('ERROR', f"Missing required key:'{e}'!", config.filelog_fd, job_name)
            continue

        db_host = connect.get('db_host')
        db_port = connect.get('db_port')
        db_user = connect.get('db_user')
        db_password = connect.get('db_password')

        if not (db_user and db_host and db_password):
            log_and_mail.writelog(
                'ERROR', "Can't find the authentication data, please fill in the required fields",
                config.filelog_fd, job_name)
            continue

        if not db_port:
            db_port = general_function.get_default_port('postgresql')

        is_all_flag = False

        if 'all' in target_list:
            is_all_flag = True

        if is_all_flag:
            # 'all' requested: enumerate every database on the instance.
            try:
                connection = psycopg2.connect(dbname="postgres", user=db_user, password=db_password,
                                              host=db_host, port=db_port)
            except psycopg2.Error as err:
                # BUGFIX: removed the duplicated word in the log message
                # ("with with following data" -> "with following data").
                log_and_mail.writelog(
                    'ERROR',
                    f"Can't connect to PostgreSQL instances with following data host='{db_host}', port='{db_port}', user='******', passwd='{db_password}':{err}",
                    config.filelog_fd, job_name)
                continue

            cur = connection.cursor()
            cur.execute("select datname from pg_database;")
            # 'row' instead of 'i' so the outer source index is not shadowed.
            target_list = [row[0] for row in cur.fetchall()]
            connection.close()

        for db in target_list:
            if db not in exclude_list:
                backup_full_tmp_path = general_function.get_full_path(
                    full_path_tmp_dir, db, 'pgdump', gzip)

                periodic_backup.remove_old_local_file(storages, db, job_name)

                str_auth = f' --dbname=postgresql://{db_user}:{db_password}@{db_host}:{db_port}/{db} '

                if is_success_pgdump(db, extra_keys, str_auth, backup_full_tmp_path, gzip, job_name):
                    periodic_backup.general_desc_iteration(
                        backup_full_tmp_path, storages, db, job_name)

    # After all the manipulations, delete the created temporary directory and
    # data inside the directory with cache davfs, but not the directory itself!
    general_function.del_file_objects(backup_type, full_path_tmp_dir, '/var/cache/davfs2/*')
def postgresql_backup(job_data):
    """Create a pg_dump backup for every target database of a job.

    Uses the options-dict form of get_job_parameters (this variant returns
    (is_prams_read, job_name, options)). Copies results to storages
    immediately or deferred depending on options['deferred_copying_level'].
    NOTE(review): flush points (==1 per source, >=2 at the end) reconstructed
    from loop placement — confirm against the project's convention.
    """
    is_prams_read, job_name, options = general_function.get_job_parameters(
        job_data)
    if not is_prams_read:
        return

    full_path_tmp_dir = general_function.get_tmp_dir(options['tmp_dir'], options['backup_type'])
    # Per-database dump outcome, consumed by the deferred copy passes below.
    dumped_dbs = {}
    for i in range(len(options['sources'])):
        exclude_list = options['sources'][i].get('excludes', [])
        try:
            connect = options['sources'][i]['connect']
            target_list = options['sources'][i]['target']
            gzip = options['sources'][i]['gzip']
            extra_keys = options['sources'][i]['extra_keys']
        except KeyError as e:
            log_and_mail.writelog('ERROR', f"Missing required key:'{e}'!", config.filelog_fd, job_name)
            continue

        db_host = connect.get('db_host')
        db_port = connect.get('db_port')
        db_user = connect.get('db_user')
        db_password = connect.get('db_password')

        # NOTE(review): rejects the source only when user, host AND password
        # are all empty; an older variant required all three — confirm.
        if not (db_user or db_host or db_password):
            log_and_mail.writelog(
                'ERROR', "Can't find the authentication data, please fill in the required fields",
                config.filelog_fd, job_name)
            continue

        if not db_port:
            db_port = general_function.get_default_port('postgresql')

        is_all_flag = False

        if 'all' in target_list:
            is_all_flag = True

        if is_all_flag:
            # 'all' requested: enumerate every database on the instance.
            try:
                connection = psycopg2.connect(dbname="postgres", user=db_user, password=db_password,
                                              host=db_host, port=db_port)
            except psycopg2.Error as err:
                log_and_mail.writelog(
                    'ERROR', f"Can't connect to PostgreSQL instances with with following data host='{db_host}', "
                             f"port='{db_port}', user='******', passwd='{db_password}':{err}",
                    config.filelog_fd, job_name)
                continue

            cur = connection.cursor()
            cur.execute("select datname from pg_database;")
            target_list = [i[0] for i in cur.fetchall()]
            connection.close()

        for db in target_list:
            if db not in exclude_list:
                # The source index 'i' keeps temp file names unique per source.
                backup_full_tmp_path = general_function.get_full_path(
                    full_path_tmp_dir, db, 'pgdump.sql', gzip, i)

                # NOTE(review): this variant calls remove_local_file (other
                # functions in this file use remove_old_local_file) —
                # presumably a renamed API; verify against periodic_backup.
                periodic_backup.remove_local_file(options['storages'], db, job_name)

                str_auth = f' --dbname=postgresql://{db_user}:{db_password}@{db_host}:{db_port}/{db} '

                if is_success_pgdump(db, extra_keys, str_auth, backup_full_tmp_path, gzip, job_name):
                    dumped_dbs[db] = {
                        'success': True,
                        'tmp_path': backup_full_tmp_path
                    }
                else:
                    dumped_dbs[db] = {'success': False}

                # Level <= 0: copy to storages immediately per database.
                if options['deferred_copying_level'] <= 0 and dumped_dbs[db][
                        'success']:
                    periodic_backup.general_desc_iteration(
                        backup_full_tmp_path, options['storages'], db,
                        job_name, options['safety_backup'])

        # Level == 1: copy everything dumped so far after each source.
        for db, result in dumped_dbs.items():
            if options['deferred_copying_level'] == 1 and result['success']:
                periodic_backup.general_desc_iteration(
                    result['tmp_path'], options['storages'], db,
                    job_name, options['safety_backup'])

    # Level >= 2: copy only once, after all sources.
    for db, result in dumped_dbs.items():
        if options['deferred_copying_level'] >= 2 and result['success']:
            periodic_backup.general_desc_iteration(result['tmp_path'], options['storages'], db,
                                                   job_name, options['safety_backup'])

    # After all the manipulations, delete the created temporary directory and
    # data inside the directory with cache davfs, but not the directory itself!
    general_function.del_file_objects(options['backup_type'], full_path_tmp_dir, '/var/cache/davfs2/*')