def test_getBackupHosts(test_config, tmp_path):
    """statusemail.getBackupHosts returns hostnames of enabled jobs only."""
    config().jobspooldirectory = str(tmp_path)
    config().backupmailrecipients = ['*****@*****.**']
    se = statusemail()

    # No jobs at all -> empty list.
    assert se.getBackupHosts([]) == []

    # A disabled job contributes nothing.
    disabled = job(os.path.join(os.path.dirname(__file__),
                                'etc/hostname-only.job'))
    assert se.getBackupHosts([disabled]) == []

    # An enabled job contributes its hostname.
    enabled = job(os.path.join(os.path.dirname(__file__),
                               'etc/localhost.job'))
    assert se.getBackupHosts([enabled]) == ['localhost']
def test_getMissingHosts(test_config, tmp_path):
    """statusemail.getMissingHosts flags enabled jobs absent from history."""
    config().jobspooldirectory = str(tmp_path)
    config().backupmailrecipients = ['*****@*****.**']
    se = statusemail()

    # Neither jobs nor history -> nothing is missing.
    se.history = []
    assert se.getMissingHosts([]) == []

    # Enabled job with empty history -> reported as missing.
    enabled = job(os.path.join(os.path.dirname(__file__),
                               'etc/localhost.job'))
    se.history = []
    missing = se.getMissingHosts([enabled])
    assert len(missing) == 1
    assert 'localhost' in missing

    # Enabled job already present in history -> not missing.
    se.history = [{'hostname': 'localhost'}]
    assert se.getMissingHosts([enabled]) == []

    # Disabled job is never considered missing, even with no history.
    disabled = job(os.path.join(os.path.dirname(__file__),
                                'etc/hostname-only.job'))
    se.history = []
    assert se.getMissingHosts([disabled]) == []
def getJobArray(self, jobpath=None):
    """Build the list of job objects to run.

    With no explicit *jobpath*, every ``*.job`` file in the configured
    job directory is loaded; otherwise only the given file is.

    :param jobpath: optional path to a single job file (default ``None``,
        added for backward-compatible parity with the sibling
        implementation that already accepts it as optional).
    :returns: list of ``job`` instances (empty when the directory is
        missing).
    """
    jobArray = []
    if jobpath is None:
        # Renamed from ``dir``/``file`` to avoid shadowing builtins.
        directory = config().jobconfigdirectory.rstrip('/')
        if os.path.exists(directory):
            os.chdir(directory)
            for filename in glob.glob("*.job"):
                jobArray.append(job(directory + "/" + filename))
        else:
            logger().error(
                "Job directory (%s) doesn't exists, exiting (1)" % directory)
    else:
        jobArray.append(job(jobpath))
    return jobArray
def test_hooks_invalid_properties():
    """Hooks with malformed property values stay in j.hooks but are not
    sorted into any of the before/after local/remote buckets."""
    j = job(os.path.join(os.path.dirname(__file__),
                         'etc/hooks-invalid-properties.job'))

    assert j.enabled is True
    assert j.hostname == 'localhost'
    assert j.ssh is False
    assert j.rsyncusername == 'autorsyncbackup'
    assert j.rsyncpassword == 'fee-fi-fo-fum'
    assert j.port == 873
    assert j.rsyncshare == 'backup'
    assert j.sshusername == 'autorsyncbackup'
    assert j.sshprivatekey == '/home/autorsyncbackup/.ssh/id_rsa'

    # The invalid string values are preserved verbatim on the hook entry.
    expected = [
        {
            'local': 'yes',
            'runtime': 'any',
            'continueonerror': 'perhaps',
        },
    ]
    assert pprint.pformat(j.hooks) == pprint.pformat(expected)
    for bucket in (j.beforeLocalHooks, j.afterLocalHooks,
                   j.beforeRemoteHooks, j.afterRemoteHooks):
        assert bucket == []
def test_default_config(test_config):
    """Values omitted from the job file fall back to the global config."""
    j = job(os.path.join(os.path.dirname(__file__),
                         'etc/default-config.job'))

    assert j.enabled is True
    assert j.hostname == 'localhost'
    assert j.ssh is False
    assert j.ssh_sudo is False
    assert j.sshusername is None
    assert j.sshprivatekey is None
    assert j.rsyncusername == 'autorsyncbackup'
    assert j.rsyncpassword == 'fee-fi-fo-fum'
    assert j.rsyncshare == 'backup'
    assert j.port == 873

    # Everything below should mirror the global configuration defaults.
    for attr in ('backupdir', 'speedlimitkb', 'dailyrotation',
                 'weeklyrotation', 'monthlyrotation', 'weeklybackup',
                 'monthlybackup', 'exclude'):
        assert getattr(j, attr) == getattr(config(), attr), attr
def test_hooks_remote_after():
    """A remote 'after' hook is bucketed only into afterRemoteHooks."""
    j = job(os.path.join(os.path.dirname(__file__),
                         'etc/hooks-remote-after.job'))

    assert j.enabled is True
    assert j.hostname == 'localhost'
    assert j.ssh is False
    assert j.rsyncusername == 'autorsyncbackup'
    assert j.rsyncpassword == 'fee-fi-fo-fum'
    assert j.port == 873
    assert j.rsyncshare == 'backup'
    assert j.sshusername == 'autorsyncbackup'
    assert j.sshprivatekey == '/home/autorsyncbackup/.ssh/id_rsa'

    hooks = [
        {
            'script': 'uptime',
            'local': False,
            'runtime': 'after',
            'continueonerror': True,
        },
    ]
    assert pprint.pformat(j.hooks) == pprint.pformat(hooks)
    assert j.beforeLocalHooks == []
    assert j.afterLocalHooks == []
    assert j.beforeRemoteHooks == []
    assert pprint.pformat(j.afterRemoteHooks) == pprint.pformat(hooks)
def test_executeRemoteCommand(monkeypatch):
    """executeRemoteCommand parses status and stdout from a mocked SSH call."""
    def fake_connect(self, hostname, username=None, key_filename=None):
        return True

    def fake_exec_command(self, command):
        # Last stdout line carries the exit status of the remote command.
        return (io.StringIO(''),
                io.StringIO('Mock STDOUT\n0'),
                io.StringIO(''))

    monkeypatch.setattr(paramiko.SSHClient, 'connect', fake_connect)
    monkeypatch.setattr(paramiko.SSHClient, 'exec_command', fake_exec_command)

    j = job(os.path.join(os.path.dirname(__file__), 'etc/localhost.job'))
    status, stdout, stderr = command().executeRemoteCommand(j, 'uptime')
    assert status == 0
    assert 'Mock STDOUT' in stdout
def test_identifyJob_error(tmp_path, caplog):
    """identifyJob logs an error and returns None when no row matches."""
    logger().debuglevel = 3
    jrh = jobrunhistory(str(tmp_path), check=True)
    jrh.insertJob(
        {
            'hostname': 'localhost',
            'startdatetime': time.time(),
            'rsync_total_file_size': 1337,
            'rsync_literal_data': 42,
        },
        [],
    )

    j = job(os.path.join(os.path.dirname(__file__), 'etc/localhost.job'))
    # A directory timestamped "now" will not match the inserted row.
    directory = datetime.datetime.today().strftime(
        "%Y-%m-%d_%H-%M-%S_backup.0")

    assert jrh.identifyJob(j, directory) is None
    assert 'cannot identify job for' in caplog.text
def getJobArray(self, jobpath=None):
    """Collect job objects to run.

    Loads every ``*.job`` file from the configured job directory, or just
    the single file at *jobpath* when one is given.

    :param jobpath: optional path to a single job file.
    :returns: list of ``job`` instances (empty if the directory is missing).
    """
    if jobpath is not None:
        return [job(jobpath)]

    directory = config().jobconfigdirectory.rstrip('/')
    if not os.path.exists(directory):
        logger().error(
            "Job directory (%s) doesn't exists, exiting (1)" % directory)
        return []

    os.chdir(directory)
    return [job(directory + "/" + filename)
            for filename in glob.glob("*.job")]
def test_empty():
    """An empty job file yields a disabled job."""
    j = job(os.path.join(os.path.dirname(__file__), 'etc/empty.job'))
    assert j.enabled is False
def test_rsync_no_username():
    """An rsync job without a username is left disabled."""
    j = job(os.path.join(os.path.dirname(__file__),
                         'etc/rsync-no-username.job'))
    assert j.enabled is False
    assert j.hostname == 'localhost'
    assert j.ssh is False
    assert j.rsyncusername is None
def test_rsync_no_password():
    """An rsync job without a password is left disabled."""
    j = job(os.path.join(os.path.dirname(__file__),
                         'etc/rsync-no-password.job'))
    assert j.enabled is False
    assert j.hostname == 'localhost'
    assert j.ssh is False
    assert j.rsyncusername == 'autorsyncbackup'
    assert j.rsyncpassword is None
def test_generateExclude():
    """generateExclude renders each exclude pattern as a quoted option."""
    j = job(os.path.join(os.path.dirname(__file__), 'etc/rsync.job'))
    exclude = rsync().generateExclude(j)
    assert exclude == " --exclude '*.bak' --exclude '.cache/*'"
def test_ssh_no_username():
    """An SSH job without a username is left disabled."""
    j = job(os.path.join(os.path.dirname(__file__),
                         'etc/ssh-no-username.job'))
    assert j.enabled is False
    assert j.hostname == 'localhost'
    assert j.ssh is True
    assert j.ssh_sudo is True
    assert j.sshusername is None
def test_hostname_only():
    """A job file containing only a hostname stays disabled."""
    j = job(os.path.join(os.path.dirname(__file__),
                         'etc/hostname-only.job'))
    assert j.enabled is False
    assert j.hostname == 'localhost'
    assert j.ssh is False
    assert j.ssh_sudo is False
    assert j.sshusername is None
def test_ssh_no_privatekey():
    """An SSH job without a private key is left disabled."""
    j = job(os.path.join(os.path.dirname(__file__),
                         'etc/ssh-no-privatekey.job'))
    assert j.enabled is False
    assert j.hostname == 'localhost'
    assert j.ssh is True
    assert j.ssh_sudo is True
    assert j.sshusername == 'autorsyncbackup'
    assert j.sshprivatekey is None
def test_rsync_no_port():
    """An rsync job without an explicit port defaults to 873."""
    j = job(os.path.join(os.path.dirname(__file__),
                         'etc/rsync-no-port.job'))
    assert j.enabled is False
    assert j.hostname == 'localhost'
    assert j.ssh is False
    assert j.rsyncusername == 'autorsyncbackup'
    assert j.rsyncpassword == 'fee-fi-fo-fum'
    assert j.port == 873
def test_ssh_no_port():
    """An SSH job without an explicit port defaults to 22."""
    j = job(os.path.join(os.path.dirname(__file__),
                         'etc/ssh-no-port.job'))
    assert j.enabled is False
    assert j.hostname == 'localhost'
    assert j.ssh is True
    assert j.ssh_sudo is True
    assert j.sshusername == 'autorsyncbackup'
    assert j.sshprivatekey == '/home/autorsyncbackup/.ssh/id_rsa'
    assert j.port == 22
def test_executeLocalCommand():
    """Running a real local 'uptime' returns rc 0 with load-average output."""
    j = job(os.path.join(os.path.dirname(__file__), 'etc/localhost.job'))
    status, stdout, stderr = command().executeLocalCommand(j, 'uptime')
    assert status == 0
    assert 'load average' in stdout
    assert stderr == ''
def test_rsync_fileset():
    """A fileset entry in the job file is parsed onto j.include."""
    j = job(os.path.join(os.path.dirname(__file__),
                         'etc/rsync-fileset.job'))
    assert j.enabled is True
    assert j.hostname == 'localhost'
    assert j.ssh is False
    assert j.rsyncusername == 'autorsyncbackup'
    assert j.rsyncpassword == 'fee-fi-fo-fum'
    assert j.port == 873
    assert j.rsyncshare == 'backup'
    assert j.include == ['/etc']
def test_generateInclude_error(caplog):
    """generateInclude returns False and logs when no fileset is given."""
    logger().debuglevel = 3
    j = job(os.path.join(os.path.dirname(__file__),
                         'etc/rsync-no-include.job'))
    assert rsync().generateInclude(j) is False
    assert 'No include/fileset specified' in caplog.text
def test_generateInclude_ssh():
    """SSH jobs render the include source as user@host:path."""
    j = job(os.path.join(os.path.dirname(__file__), 'etc/ssh.job'))
    expected = " %s@%s:%s" % (j.sshusername, j.hostname, '/etc')
    assert rsync().generateInclude(j) == expected
def test_checkRemoteHostViaSshProtocol(monkeypatch):
    """A successful (mocked) SSH connect reports the host as reachable."""
    def fake_connect(self, hostname, username=None, key_filename=None):
        return True

    monkeypatch.setattr(paramiko.SSHClient, 'connect', fake_connect)
    j = job(os.path.join(os.path.dirname(__file__), 'etc/localhost.job'))
    assert command().checkRemoteHostViaSshProtocol(j) is True
def test_job():
    """A fully populated job file maps every key onto the job object."""
    j = job(os.path.join(os.path.dirname(__file__), 'etc/localhost.job'))

    # Boolean flags are checked by identity.
    assert j.enabled is True
    assert j.ssh is False
    assert j.ssh_sudo is False

    expected = {
        'hostname': 'localhost',
        'sshusername': 'autorsyncbackup',
        'sshprivatekey': '/home/autorsyncbackup/.ssh/id_rsa',
        'rsyncusername': 'autorsyncbackup',
        'rsyncpassword': 'fee-fi-fo-fum',
        'port': 10873,
        'rsyncshare': 'backup',
        'backupdir': '/tmp',
        'speedlimitkb': 10000,
        'dailyrotation': 3,
        'weeklyrotation': 2,
        'monthlyrotation': 1,
        'weeklybackup': 3,
        'monthlybackup': 2,
        'include': ['/etc'],
        'exclude': ['*.bak', '.cache/*'],
    }
    for attr, value in expected.items():
        assert getattr(j, attr) == value, attr

    hooks = [
        {
            'script': 'uptime',
            'local': True,
            'runtime': 'before',
            'continueonerror': True,
        },
        {
            'script': 'cat /etc/motd',
            'local': False,
            'runtime': 'after',
            'continueonerror': True,
        },
    ]
    assert pprint.pformat(j.hooks) == pprint.pformat(hooks)
def test_generateInclude_rsync():
    """Rsync jobs render the include source as rsync://user@host:port/share/path."""
    j = job(os.path.join(os.path.dirname(__file__), 'etc/rsync.job'))
    expected = " rsync://%s@%s:%s/%s%s" % (
        j.rsyncusername,
        j.hostname,
        j.port,
        j.rsyncshare,
        '/etc',
    )
    assert rsync().generateInclude(j) == expected
def test_executeRsyncViaSshProtocol(test_config, tmp_path):
    """A mocked rsync binary returning success yields status 0 and output."""
    config().jobspooldirectory = str(tmp_path)
    config().rsyncpath = os.path.join(os.path.dirname(__file__),
                                      'bin/mock-rsync-ok.sh')
    j = job(os.path.join(os.path.dirname(__file__), 'etc/ssh.job'))
    status, stdout = rsync().executeRsyncViaSshProtocol(j, None)
    assert status == 0
    assert 'sending incremental file list' in stdout
def test_checkRemoteHost_rsync_fail(test_config, tmp_path):
    """A failing rsync binary marks the host check and backup status failed."""
    config().jobspooldirectory = str(tmp_path)
    config().rsyncpath = os.path.join(os.path.dirname(__file__),
                                      'bin/mock-rsync-fail.sh')
    j = job(os.path.join(os.path.dirname(__file__), 'etc/rsync.job'))
    ret = rsync().checkRemoteHost(j)
    assert ret is False
    # The boolean result is mirrored into the job's backup status as 0/1.
    assert j.backupstatus['rsync_backup_status'] == int(ret)
def test_checkRemoteHost_ssh_fail(test_config, tmp_path, monkeypatch):
    """An SSH connect error marks the host check and backup status failed."""
    def fake_connect(self, hostname, username=None, key_filename=None):
        raise IOError('Mock connection failed')

    monkeypatch.setattr(paramiko.SSHClient, 'connect', fake_connect)
    config().jobspooldirectory = str(tmp_path)
    j = job(os.path.join(os.path.dirname(__file__), 'etc/ssh.job'))
    ret = rsync().checkRemoteHost(j)
    assert ret is False
    # The boolean result is mirrored into the job's backup status as 0/1.
    assert j.backupstatus['rsync_backup_status'] == int(ret)
def test_showjob(capsys):
    # Exercise job.showjob() and pin its human-readable dump with a regex.
    path = os.path.join(
        os.path.dirname(__file__),
        'etc/localhost.job',
    )
    j = job(path)
    j.showjob()
    captured = capsys.readouterr()
    # NOTE(review): 'beforeRemoteHooks' is matched twice below while the
    # other buckets appear once each — presumably this mirrors showjob()'s
    # actual output; confirm it is not a copy-paste slip in either place.
    output_regex = (r'Show job:\n'
                    r'enabled: True\n'
                    r'filepath: ' + path + r'\n'
                    r'hostname: localhost\n'
                    r'rsyncusername: autorsyncbackup\n'
                    r'rsyncpassword: fee-fi-fo-fum\n'
                    r'rsyncshare: backup\n'
                    r'sshusername: autorsyncbackup\n'
                    r'sshprivatekey: /home/autorsyncbackup/.ssh/id_rsa\n'
                    r'backupdir: /tmp\n'
                    r'speedlimitkb: 10000\n'
                    r'dailyrotation: 3\n'
                    r'weeklyrotation: 2\n'
                    r'monthlyrotation: 1\n'
                    r'weeklybackup: 3\n'
                    r'monthlybackup: 2\n'
                    r"include: \['/etc'\]\n"
                    r"exclude: \['\*\.bak', '\.cache/\*'\]\n"
                    r'backupstatus: {}\n'
                    r'hooks: \[{.*?}, {.*?}\]\n'
                    r'beforeLocalHooks: \[.*?\]\n'
                    r'afterLocalHooks: \[\]\n'
                    r'beforeRemoteHooks: \[\]\n'
                    r'beforeRemoteHooks: \[\]\n'
                    r'afterRemoteHooks: \[{.*?}\]\n'
                    r'$')
    # re.match anchors at the start of the capture; r'$' anchors the end.
    assert re.match(output_regex, captured.out)
def test_executeRsync_ssh_latest(test_config, tmp_path):
    """executeRsync over SSH succeeds with the mocked rsync and records status."""
    config().jobspooldirectory = str(tmp_path)
    config().rsyncpath = os.path.join(os.path.dirname(__file__),
                                      'bin/mock-rsync-ok.sh')
    j = job(os.path.join(os.path.dirname(__file__), 'etc/ssh.job'))

    ret = rsync().executeRsync(j, 'foo')
    assert ret is True
    assert j.backupstatus['rsync_backup_status'] == int(ret)
    assert j.backupstatus['rsync_return_code'] == 0
    assert 'sending incremental file list' in j.backupstatus['rsync_stdout']
def test_hooks_no_properties():
    """A hook entry with no properties is kept as None and never bucketed."""
    j = job(os.path.join(os.path.dirname(__file__),
                         'etc/hooks-no-properties.job'))

    assert j.enabled is True
    assert j.hostname == 'localhost'
    assert j.ssh is False
    assert j.rsyncusername == 'autorsyncbackup'
    assert j.rsyncpassword == 'fee-fi-fo-fum'
    assert j.port == 873
    assert j.rsyncshare == 'backup'
    assert j.sshusername == 'autorsyncbackup'
    assert j.sshprivatekey == '/home/autorsyncbackup/.ssh/id_rsa'

    assert j.hooks == [None]
    for bucket in (j.beforeLocalHooks, j.afterLocalHooks,
                   j.beforeRemoteHooks, j.afterRemoteHooks):
        assert bucket == []