Example #1
def test_getBackupHosts(test_config, tmp_path):
    config().jobspooldirectory = str(tmp_path)
    config().backupmailrecipients = ['*****@*****.**']

    se = statusemail()

    # no jobs
    ret = se.getBackupHosts([])

    assert ret == []

    # disabled job
    path = os.path.join(
               os.path.dirname(__file__),
               'etc/hostname-only.job',
           )

    j = job(path)

    ret = se.getBackupHosts([j])

    assert ret == []

    # enabled job
    path = os.path.join(
               os.path.dirname(__file__),
               'etc/localhost.job',
           )

    j = job(path)

    ret = se.getBackupHosts([j])

    assert ret == ['localhost']
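
The assertions above imply that getBackupHosts collects the hostnames of enabled jobs only. A minimal sketch of that behavior, purely hypothetical and not necessarily the project's actual implementation:

    def getBackupHosts(self, jobs):
        # Disabled jobs are skipped; enabled jobs contribute their hostname
        return [j.hostname for j in jobs if j.enabled]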
Example #2
def test_getOverallBackupState_warning(test_config, tmp_path):
    config().jobspooldirectory = str(tmp_path)
    config().backupmailrecipients = ['*****@*****.**']

    se = statusemail()

    se.history = [
                   {
                     'rsync_backup_status': 1,
                     'sanity_check':        1,
                     'integrity_confirmed': True,
                     'commands':            [
                                              {
                                                'returncode':      1,
                                                'continueonerror': True,
                                              },
                                            ],
                   },
                 ]

    (ret, good, warning, bad) = se.getOverallBackupState(se.history)

    assert ret == 'warning'
    assert len(good) == 0
    assert len(warning) == 1
    assert len(bad) == 0
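
The history entry above passes every hard check (rsync_backup_status, sanity_check, integrity_confirmed), but it also contains a command with a non-zero return code that is flagged continueonerror. Evidently that combination downgrades the run to 'warning' rather than 'bad', which is exactly what the four assertions verify.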
Example #3
def test_runBackup_dryrun(test_config, tmp_path, monkeypatch):
    email_path = os.path.join(str(tmp_path), 'status.eml')

    def mock_send(self, message):
        with open(email_path, 'w') as f:
            f.write(message.as_string())

        return True

    monkeypatch.setattr(mailer.Mailer, 'send', mock_send)

    config().rsyncpath = os.path.join(
        os.path.dirname(__file__),
        'bin/mock-rsync-ok.sh',
    )
    config().backupmailrecipients = ['*****@*****.**']

    path = os.path.join(
        os.path.dirname(__file__),
        'rsync.job',
    )

    runBackup(path, True)

    assert os.path.exists(email_path) is False
    assert os.path.exists(config().lockfile) is False
Example #4
def test_runBackup_no_jobs(test_config, tmp_path, monkeypatch, caplog):
    email_path = os.path.join(str(tmp_path), 'sudden-death.eml')

    def mock_send(self, message):
        with open(email_path, 'w') as f:
            f.write(message.as_string())

        return True

    monkeypatch.setattr(mailer.Mailer, 'send', mock_send)

    config().rsyncpath = os.path.join(
        os.path.dirname(__file__),
        'bin/mock-rsync-ok.sh',
    )
    config().backupmailrecipients = ['*****@*****.**']

    with pytest.raises(Exception) as e:
        runBackup(str(tmp_path), False)

    assert e is not None

    message = 'Error while reading %s, skipping job' % str(tmp_path)

    assert message in caplog.text

    assert os.path.exists(email_path) is False
    assert os.path.exists(config().lockfile) is False
Example #5
def test_config(tmp_path):
    path = os.path.join(
        str(tmp_path),
        'main.yaml',
    )

    options = {
        'lockfile': os.path.join(str(tmp_path), 'autorsyncbackup.pid'),
        'jobconfigdirectory': os.path.join(
            str(tmp_path),
            'etc',
        ),
        'jobspooldirectory': os.path.join(
            str(tmp_path),
            'spool',
        ),
        'backupdir': os.path.join(
            str(tmp_path),
            'backups',
        ),
        'logfile': os.path.join(str(tmp_path), 'log/autorsyncbackup.log'),
        'debuglevel': 4,
    }

    with open(path, 'w') as f:
        for key in options:
            f.write("%s: %s\n" % (key, options[key]))

    config(path)

    return True
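
This fixture serializes the options with plain "%s: %s" formatting, which only produces valid YAML because every value here is a scalar. A sketch that would also survive nested values, assuming PyYAML (already used elsewhere in these tests) is available:

    import yaml

    with open(path, 'w') as f:
        yaml.safe_dump(options, f, default_flow_style=False)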
Example #6
    def getWorkingDirectory(self):
        """Check in which folder we place the backup today"""
        ret = "daily"
        if(int(datetime.datetime.today().strftime("%w")) == config().weeklybackup):
            ret = "weekly"
        if(int(datetime.datetime.today().strftime("%d")) == config().monthlybackup):
            ret = "monthly"
        return ret
Example #7
    def _send(self, subject, htmlbody):
        for to in config().backupmailrecipients:
            logger().info("INFO: Sent backup report to [%s] via SMTP:%s" % (to, config().smtphost))
            message = Message(From=config().backupmailfrom, To=to, charset="utf-8")
            message.Subject = subject
            message.Html = htmlbody
            message.Body = """This is an HTML e-mail with the backup overview, please use a HTML enabled e-mail client."""
            sender = Mailer(config().smtphost)
            sender.send(message)
Example #8
    def _send(self, subject, htmlbody, textbody):
        for to in config().backupmailrecipients:
            logger().info("Sent backup report to [%s] via SMTP:%s" % (to, config().smtphost))
            message = Message(From=config().backupmailfrom, To=to, charset="utf-8")
            message.Subject = subject
            message.Html = htmlbody
            message.Body = textbody
            sender = Mailer(config().smtphost)
            sender.send(message)
Example #9
    def getWorkingDirectory(self):
        """Check in which folder we place the backup today"""
        ret = "daily"
        if (int(datetime.datetime.today().strftime("%w")) ==
                config().weeklybackup):
            ret = "weekly"
        if (int(datetime.datetime.today().strftime("%d")) ==
                config().monthlybackup):
            ret = "monthly"
        return ret
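
The two strftime calls drive the whole decision: %w is the weekday as a number (Sunday is 0) and %d is the day of the month, so weeklybackup and monthlybackup are evidently configured as a weekday number and a day of the month respectively. Note that the monthly check runs second and overwrites ret, so it wins when both match. A quick way to inspect the values being compared:

    import datetime

    today = datetime.datetime.today()
    print(int(today.strftime("%w")))  # weekday: Sunday == 0 ... Saturday == 6
    print(int(today.strftime("%d")))  # day of month: 1-31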
Example #10
def test_listJobs(test_config, tmp_path, capsys):
    config().lockfile = os.path.join(
        str(tmp_path),
        'autorsyncbackup.pid',
    )
    config().jobconfigdirectory = os.path.join(
        os.path.dirname(__file__),
        'etc/list',
    )
    config().jobspooldirectory = str(tmp_path)
    config().backupdir = os.path.join(
        str(tmp_path),
        'backups',
    )

    for subdir in [
            'daily',
            'weekly',
            'monthly',
    ]:
        path = os.path.join(
            config().backupdir,
            'localhost',
            subdir,
        )

        os.makedirs(path)

    listJobs('total')

    captured = capsys.readouterr()

    for title in [
            'Hostname',
            'Estimated total backup size',
            'Average backup size increase',
    ]:
        assert title in captured.out

    assert 'localhost' in captured.out
    assert '0 Bytes' in captured.out

    listJobs('average')

    captured = capsys.readouterr()

    for title in [
            'Hostname',
            'Estimated total backup size',
            'Average backup size increase',
    ]:
        assert title in captured.out

    assert 'localhost' in captured.out
    assert '0 Bytes' in captured.out
Example #11
    def _send(self, subject, htmlbody, textbody):
        for to in config().backupmailrecipients:
            logger().info("Sent backup report to [%s] via SMTP:%s" %
                          (to, config().smtphost))
            message = Message(From=config().backupmailfrom,
                              To=to,
                              charset="utf-8")
            message.Subject = subject
            message.Html = htmlbody
            message.Body = textbody
            sender = Mailer(config().smtphost)
            sender.send(message)
Example #12
    def _send(self, subject, htmlbody):
        for to in config().backupmailrecipients:
            logger().info("Sent backup report to [%s] via SMTP:%s" %
                          (to, config().smtphost))
            message = Message(From=config().backupmailfrom,
                              To=to,
                              charset="utf-8")
            message.Subject = subject
            message.Html = htmlbody
            message.Body = """This is an HTML e-mail with the backup overview, please use a HTML enabled e-mail client."""
            sender = Mailer(config().smtphost)
            sender.send(message)
Example #13
def test_getMissingHosts(test_config, tmp_path):
    config().jobspooldirectory = str(tmp_path)
    config().backupmailrecipients = ['*****@*****.**']

    se = statusemail()

    # no jobs nor history
    se.history = []

    hosts = se.getMissingHosts([])

    assert hosts == []

    # enabled job, no history
    path = os.path.join(
               os.path.dirname(__file__),
               'etc/localhost.job',
           )

    j = job(path)

    se.history = []

    hosts = se.getMissingHosts([j])

    assert len(hosts) == 1
    assert 'localhost' in hosts

    # enabled job, job in history
    se.history = [
                   {
                     'hostname': 'localhost',
                   },
                 ]

    hosts = se.getMissingHosts([j])

    assert hosts == []

    # disabled job, no history
    path = os.path.join(
               os.path.dirname(__file__),
               'etc/hostname-only.job',
           )

    j = job(path)

    se.history = []

    hosts = se.getMissingHosts([j])

    assert hosts == []
Example #14
def test_readConfig_exceptions(monkeypatch):
    def mock_load(stream):
        return {}

    monkeypatch.setattr(yaml, 'load', mock_load)

    config().debugmessages = []

    assert len(config().debugmessages) == 0

    config().readConfig()

    assert len(config().debugmessages) > 1
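
A caveat on the monkeypatch target: mock_load accepts a single positional argument, which implies readConfig calls yaml.load(f) without an explicit Loader. PyYAML 5.1 and later warn about that form, so if the project ever moves to yaml.safe_load, the setattr target here has to move with it.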
Example #15
def test_default_config(test_config):
    path = os.path.join(
        os.path.dirname(__file__),
        'etc/default-config.job',
    )

    j = job(path)

    assert j.enabled is True
    assert j.hostname == 'localhost'
    assert j.ssh is False
    assert j.ssh_sudo is False
    assert j.sshusername is None
    assert j.sshprivatekey is None
    assert j.rsyncusername == 'autorsyncbackup'
    assert j.rsyncpassword == 'fee-fi-fo-fum'
    assert j.rsyncshare == 'backup'
    assert j.port == 873
    assert j.backupdir == config().backupdir
    assert j.speedlimitkb == config().speedlimitkb
    assert j.dailyrotation == config().dailyrotation
    assert j.weeklyrotation == config().weeklyrotation
    assert j.monthlyrotation == config().monthlyrotation
    assert j.weeklybackup == config().weeklybackup
    assert j.monthlybackup == config().monthlybackup
    assert j.exclude == config().exclude
Example #16
def test_checkRemoteHost_fail(test_config, tmp_path):
    config().jobspooldirectory = str(tmp_path)
    config().rsyncpath = os.path.join(
        os.path.dirname(__file__),
        'bin/mock-rsync-fail.sh',
    )

    path = os.path.join(
        os.path.dirname(__file__),
        'etc/rsync.sh',
    )

    ret = checkRemoteHost(path)

    assert ret is True
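
The assertion may look inverted given the failing mock, but judging from the fragment in Example #41 the module-level checkRemoteHost helper returns the negation of director.checkRemoteHost, so True here means the connection check did not succeed.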
Example #17
def test_readConfig_load_exception(monkeypatch, capsys):
    def mock_load(stream):
        raise IOError('Mock load failure')

    monkeypatch.setattr(yaml, 'load', mock_load)

    with pytest.raises(SystemExit) as e:
        config().readConfig()

    assert e.type == SystemExit
    assert e.value.code == 1

    captured = capsys.readouterr()

    assert 'Error while reading main config' in captured.out
Example #18
def listJobs(sort):
    with Pidfile(config().lockfile, logger().debug, logger().error):
        # Run director
        directorInstance = director()
        jobs = directorInstance.getJobArray()
        sizes = {}
        averages = {}
        tot_size = 0
        tot_avg = 0
        for job in jobs:
            sizes[job.hostname], averages[
                job.hostname] = director().getBackupsSize(job)
        aux = sorted(sizes.items(), key=lambda x: x[1], reverse=True)
        if sort == 'average':
            aux = sorted(averages.items(), key=lambda x: x[1], reverse=True)
        x = PrettyTable([
            'Hostname', 'Estimated total backup size',
            'Average backup size increase'
        ])
        for elem in aux:
            hostname = elem[0]
            tot_size += sizes[hostname]
            tot_avg += averages[hostname]
            size = jinjafilters()._bytesToReadableStr(sizes[hostname])
            avg = jinjafilters()._bytesToReadableStr(averages[hostname])
            x.add_row([hostname, size, avg])
        tot_size = jinjafilters()._bytesToReadableStr(tot_size)
        tot_avg = jinjafilters()._bytesToReadableStr(tot_avg)
        x.add_row(['Total', tot_size, tot_avg])
        x.align = "l"
        x.padding_width = 1
        print(x)
Example #19
    def executeRsyncViaSshProtocol(self, job, latest):
        directory = job.backupdir.rstrip('/') + "/" + job.hostname + "/current"
        sshoptions = "-e 'ssh -p%d -i %s -o \"PasswordAuthentication no\"'" % (job.port, job.sshprivatekey)
        options = "-aR %s --delete --stats --bwlimit=%d" % (sshoptions, job.speedlimitkb)
        exclude = self.generateExclude(job)
        if exclude:
            options += exclude
        include = self.generateInclude(job)

        # Link files to the same inodes as last backup to save disk space and boost backup performance
        if(latest):
            latest = "--link-dest=%s" % latest
        else:
            latest = ""

        # Generate rsync CLI command and execute it
        if(include):
            command = "%s %s %s %s %s" % (config().rsyncpath, options, latest, include, directory)
            logger().info("Executing rsync command (%s)" % command)
            errcode, stdout = self.executeCommand(command)
        else:
            stdout = "Include/Fileset is missing, Rsync is never invoked"
            errcode = 9

        job.backupstatus['rsync_stdout'] = stdout
        job.backupstatus['rsync_return_code'] = errcode
        return errcode, stdout
Example #20
    def executeRsyncViaSshProtocol(self, job, latest):
        directory = job.backupdir.rstrip('/') + "/" + job.hostname + "/current"
        sudo_path = "--rsync-path='sudo rsync'" if job.ssh_sudo else ''
        sshoptions = ("-e 'ssh -p%d -i %s"
                      " -o \"PasswordAuthentication no\"' %s") % (
                          job.port, job.sshprivatekey, sudo_path)
        options = "-aR %s --delete --stats --bwlimit=%d" % (sshoptions,
                                                            job.speedlimitkb)
        exclude = self.generateExclude(job)
        if exclude:
            options += exclude
        include = self.generateInclude(job)

        # Link files to the same inodes as last backup to save disk space
        # and boost backup performance
        if (latest):
            latest = "--link-dest=%s" % latest
        else:
            latest = ""

        # Generate rsync CLI command and execute it
        if (include):
            command = "%s %s %s %s %s" % (config().rsyncpath, options, latest,
                                          include, directory)
            logger().info("Executing rsync command (%s)" % command)
            errcode, stdout = self.executeCommand(command)
        else:
            stdout = "Include/Fileset is missing, Rsync is never invoked"
            errcode = 9

        job.backupstatus['rsync_stdout'] = stdout
        job.backupstatus['rsync_return_code'] = errcode
        return errcode, stdout
Example #21
    def executeRsyncViaRsyncProtocol(self, job, latest):
        """Execute rsync command via rsync protocol"""
        dir = job.backupdir.rstrip('/') + "/" + job.hostname + "/current"
        options = "--contimeout=5 -aR --delete --stats --bwlimit=%d" % job.speedlimitkb
        exclude = self.generateExclude(job)
        if exclude:
            options += exclude
        include = self.generateInclude(job)

        # Link files to the same inodes as last backup to save disk space and boost backup performance
        if(latest):
            latest = "--link-dest=%s" % latest
        else:
            latest = ""
        
        # Generate rsync CLI command and execute it
        if(include):
            password = "export RSYNC_PASSWORD=\"%s\"" % job.rsyncpassword
            rsyncCommand = "%s %s %s %s %s" % (config().rsyncpath, options, latest, include, dir)
            command = "%s; %s" % (password, rsyncCommand)
            logger().info("Executing rsync command (%s)" % rsyncCommand)
            errcode, stdout = self.executeCommand(command)
        else:
            stdout = "Include/Fileset is missing, Rsync is never invoked"
            errcode = 9

        job.backupstatus['rsync_stdout'] = stdout
        job.backupstatus['rsync_return_code'] = errcode
        return errcode, stdout
Example #22
    def checkRemoteHostViaRsyncProtocol(self, job):
        """Check if remote host is up and able to accept
           connections with our credentials"""
        password = "export RSYNC_PASSWORD=\"%s\"" % job.rsyncpassword
        rsyncCommand = "%s --contimeout=5 rsync://%s@%s:%s/%s" % (
            config().rsyncpath, job.rsyncusername, job.hostname,
            job.port, job.rsyncshare)
        command = "%s; %s" % (password, rsyncCommand)
        logger().info("Executing rsync check (%s)" % rsyncCommand)
        errcode, stdout = self.executeCommand(command)

        if errcode != 0:
            logger().error("Error while connecting to host (%s) - %s" %
                           (job.hostname, stdout))
            job.backupstatus['startdatetime'] = int(time.time())
            job.backupstatus['enddatetime'] = int(time.time())
            job.backupstatus['hostname'] = job.hostname
            job.backupstatus['rsync_stdout'] = ("Error while connecting"
                                                " to host (%s) - %s") % (
                                                    job.hostname, stdout)
            ret = False
        else:
            ret = True
            logger().info(("Successfully connected to host"
                           " via rsync protocol (%s)") % job.hostname)

        return ret
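
rsync reads the password for daemon (rsync://) connections from the RSYNC_PASSWORD environment variable, which is why the command is prefixed with an export and chained with ';'. A side benefit is that only rsyncCommand, without the password prefix, is what gets passed to the logger.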
Example #23
    def executeRsyncViaRsyncProtocol(self, job, latest):
        """Execute rsync command via rsync protocol"""
        dir = job.backupdir.rstrip('/') + "/" + job.hostname + "/current"
        options = "--contimeout=5 -aR --delete --stats --bwlimit=%d" % (
            job.speedlimitkb)
        exclude = self.generateExclude(job)
        if exclude:
            options += exclude
        include = self.generateInclude(job)

        # Link files to the same inodes as last backup to save disk space
        # and boost backup performance
        if (latest):
            latest = "--link-dest=%s" % latest
        else:
            latest = ""

        # Generate rsync CLI command and execute it
        if (include):
            password = "export RSYNC_PASSWORD=\"%s\"" % job.rsyncpassword
            rsyncCommand = "%s %s %s %s %s" % (config().rsyncpath, options,
                                               latest, include, dir)
            command = "%s; %s" % (password, rsyncCommand)
            logger().info("Executing rsync command (%s)" % rsyncCommand)
            errcode, stdout = self.executeCommand(command)
        else:
            stdout = "Include/Fileset is missing, Rsync is never invoked"
            errcode = 9

        job.backupstatus['rsync_stdout'] = stdout
        job.backupstatus['rsync_return_code'] = errcode
        return errcode, stdout
Example #24
def listJobs(sort):
    with Pidfile(config().lockfile, logger().debug, logger().error):
        # Run director
        directorInstance = director()
        jobs = directorInstance.getJobArray()
        sizes = {}
        averages = {}
        tot_size=0
        tot_avg=0
        for job in jobs:
            sizes[job.hostname], averages[job.hostname] = director().getBackupsSize(job)
        aux = sorted(sizes.items(), key=lambda x: x[1], reverse=True)
        if sort == 'average':
            aux = sorted(averages.items(), key=lambda x: x[1], reverse=True)
        x = PrettyTable(['Hostname', 'Estimated total backup size', 'Average backup size increase'])
        for elem in aux:
            hostname = elem[0]
            tot_size += sizes[hostname]
            tot_avg += averages[hostname] 
            size = jinjafilters()._bytesToReadableStr(sizes[hostname])
            avg = jinjafilters()._bytesToReadableStr(averages[hostname])
            x.add_row([hostname, size, avg])
        tot_size = jinjafilters()._bytesToReadableStr(tot_size)
        tot_avg = jinjafilters()._bytesToReadableStr(tot_avg)
        x.add_row(['Total', tot_size, tot_avg])
        x.align = "l"
        x.padding_width = 1
        print(x)
Example #25
    def __init__(self, directorio):
        super(LogsController, self).__init__()
        self.directorio = directorio
        import logging
        from models.config import config

        config = config()
        LOG_FILENAME = config.pathLog + self.directorio + ".log"

        if "/" in directorio:
            carpeta = self.directorio.split("/")[0]
            if not os.path.isdir(config.pathLog + carpeta):
                os.makedirs(config.pathLog + carpeta)

        LEVELS = {
            "debug": logging.DEBUG,
            "info": logging.INFO,
            "warning": logging.WARNING,
            "error": logging.ERROR,
            "critical": logging.CRITICAL,
        }

        if len(sys.argv) > 1:
            level_name = sys.argv[1]
            level = LEVELS.get(level_name, logging.NOTSET)
            logging.basicConfig(filename=LOG_FILENAME, level=level)
Example #26
def test_executeRsyncViaSshProtocol(test_config, tmp_path):
    config().jobspooldirectory = str(tmp_path)
    config().rsyncpath = os.path.join(
        os.path.dirname(__file__),
        'bin/mock-rsync-ok.sh',
    )

    path = os.path.join(
        os.path.dirname(__file__),
        'etc/ssh.job',
    )

    j = job(path)

    r = rsync()

    (status, stdout) = r.executeRsyncViaSshProtocol(j, None)

    assert status == 0
    assert 'sending incremental file list' in stdout
Example #27
def test_checkRemoteHost_rsync_fail(test_config, tmp_path):
    config().jobspooldirectory = str(tmp_path)
    config().rsyncpath = os.path.join(
        os.path.dirname(__file__),
        'bin/mock-rsync-fail.sh',
    )

    path = os.path.join(
        os.path.dirname(__file__),
        'etc/rsync.job',
    )

    j = job(path)

    r = rsync()

    ret = r.checkRemoteHost(j)

    assert ret is False
    assert j.backupstatus['rsync_backup_status'] == int(ret)
Example #28
def test_printOutput_no_history(test_config, tmp_path, capsys):
    config().jobspooldirectory = str(tmp_path)

    jobrunhistory(str(tmp_path), check=True)

    sc = statuscli()

    sc.printOutput('localhost')

    captured = capsys.readouterr()

    assert 'Could not find hostname' in captured.out
Example #29
def test_runBackup_exception(test_config, tmp_path, monkeypatch):
    email_path = os.path.join(str(tmp_path), 'sudden-death.eml')

    def mock_send(self, message):
        with open(email_path, 'w') as f:
            f.write(message.as_string())

        return True

    monkeypatch.setattr(mailer.Mailer, 'send', mock_send)

    config().rsyncpath = os.path.join(
        os.path.dirname(__file__),
        'bin/mock-rsync-ok.sh',
    )
    config().backupmailrecipients = ['*****@*****.**']

    with open(config().lockfile, 'w') as f:
        f.write(str(os.getpid()))

    path = os.path.join(
        os.path.dirname(__file__),
        'rsync.job',
    )

    runBackup(path, False)

    assert os.path.exists(email_path) is True
    assert os.path.exists(config().lockfile) is True

    if os.path.exists(config().lockfile):
        os.unlink(config().lockfile)
Example #30
def test_executeRsync_ssh_latest(test_config, tmp_path):
    config().jobspooldirectory = str(tmp_path)
    config().rsyncpath = os.path.join(
        os.path.dirname(__file__),
        'bin/mock-rsync-ok.sh',
    )

    path = os.path.join(
        os.path.dirname(__file__),
        'etc/ssh.job',
    )

    j = job(path)

    r = rsync()

    ret = r.executeRsync(j, 'foo')

    assert ret is True
    assert j.backupstatus['rsync_backup_status'] == int(ret)
    assert j.backupstatus['rsync_return_code'] == 0
    assert 'sending incremental file list' in j.backupstatus['rsync_stdout']
Example #31
def test_executeRsync_ssh_no_include(test_config, tmp_path):
    config().jobspooldirectory = str(tmp_path)
    config().rsyncpath = os.path.join(
        os.path.dirname(__file__),
        'bin/mock-rsync-ok.sh',
    )

    path = os.path.join(
        os.path.dirname(__file__),
        'etc/ssh-no-include.job',
    )

    j = job(path)

    r = rsync()

    ret = r.executeRsync(j, 'foo')

    assert ret is False
    assert j.backupstatus['rsync_backup_status'] == 0
    assert j.backupstatus['rsync_return_code'] == 9
    assert ('Include/Fileset is missing, Rsync is never invoked'
            ) in j.backupstatus['rsync_stdout']
Example #32
    def getJobArray(self, jobpath):
        jobArray = []
        if jobpath is None:
            dir = config().jobconfigdirectory.rstrip('/')
            if(os.path.exists(dir)):
                os.chdir(dir)
                for file in glob.glob("*.job"):
                    jobArray.append(job(dir + "/" + file))
            else:
                logger().error("Job directory (%s) doesn't exist, exiting (1)" % dir)
        else:
            jobArray.append(job(jobpath))

        return jobArray
Example #33
def test_printOutput(test_config, tmp_path, capsys):
    config().jobspooldirectory = str(tmp_path)

    jrh = jobrunhistory(str(tmp_path), check=True)

    backupstatus = {
        'integrity_id': str(uuid.uuid1()),
        'hostname': 'localhost',
        'startdatetime': time.time(),
        'enddatetime': time.time(),
        'username': '******',
        'ssh': 'False',
        'share': 'backup',
        'include': '/etc',
        'exclude': '*.bak:.cache/*',
        'backupdir': '/tmp',
        'speedlimitkb': 1600,
        'filesrotate': None,
        'type': 'daily',
        'rsync_backup_status': 1,
        'rsync_return_code': 0,
        'rsync_pre_stdout': None,
        'rsync_stdout': 'foo\nbar\n',
        'rsync_number_of_files': 3278,
        'rsync_number_of_files_transferred': 1790,
        'rsync_total_file_size': 6249777,
        'rsync_total_transferred_file_size': 6213437,
        'rsync_literal_data': 6213437,
        'rsync_matched_data': 0,
        'rsync_file_list_size': 80871,
        'rsync_file_list_generation_time': 0.001,
        'rsync_file_list_transfer_time': 0,
        'rsync_total_bytes_sent': 39317,
        'rsync_total_bytes_received': 6430608,
        'sanity_check': 1,
    }

    hooks = []

    jrh.insertJob(backupstatus, hooks)

    sc = statuscli()

    sc.printOutput(backupstatus['hostname'])

    captured = capsys.readouterr()

    assert 'localhost' in captured.out
    assert 'Ok' in captured.out
Example #34
    def getJobArray(self, jobpath=None):
        jobArray = []
        if jobpath is None:
            directory = config().jobconfigdirectory.rstrip('/')
            if (os.path.exists(directory)):
                os.chdir(directory)
                for filename in glob.glob("*.job"):
                    jobArray.append(job(directory + "/" + filename))
            else:
                logger().error(
                    "Job directory (%s) doesn't exist, exiting (1)" %
                    directory)
        else:
            jobArray.append(job(jobpath))

        return jobArray
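
One side effect worth flagging: os.chdir() changes the working directory for the entire process, not just for this scan. A hypothetical variant of the loop that avoids the chdir by globbing with an absolute pattern (glob then yields absolute paths):

    for path in glob.glob(os.path.join(directory, "*.job")):
        jobArray.append(job(path))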
Example #35
def test_checkRemoteHost_ssh_fail(test_config, tmp_path, monkeypatch):
    def mock_connect(self, hostname, username=None, key_filename=None):
        raise IOError('Mock connection failed')

    monkeypatch.setattr(paramiko.SSHClient, 'connect', mock_connect)

    config().jobspooldirectory = str(tmp_path)

    path = os.path.join(
        os.path.dirname(__file__),
        'etc/ssh.job',
    )

    j = job(path)

    r = rsync()

    ret = r.checkRemoteHost(j)

    assert ret is False
    assert j.backupstatus['rsync_backup_status'] == int(ret)
Example #36
    def __init__(self, dataset, is_cuda):
        """Cache the dataset-specific config values as attributes."""
        super(gan_super, self).__init__()
        config_vars = config(dataset)
        self.lamb = config_vars.lamb
        self.key_dim = config_vars.key_dim
        self.mem_size = config_vars.mem_size
        self.z_dim = config_vars.z_dim
        self.c_dim = config_vars.c_dim
        self.fc_dim = config_vars.fc_dim
        self.f_dim = config_vars.f_dim
        self.choose_k = config_vars.choose_k
        self.alpha = config_vars.alpha
        self.num_steps = config_vars.num_steps
        self.is_cuda = is_cuda
Example #37
def test_spam():
    assert re.match(r'^\d+$', str(config().spam()))
Example #38
def test_readConfig(test_config, tmp_path):
    attributes = {
        'mainconfigpath': os.path.join(str(tmp_path), 'main.yaml'),
        'rsyncpath': '/usr/bin/rsync',
        'lockfile': os.path.join(str(tmp_path), 'autorsyncbackup.pid'),
        'jobconfigdirectory': str(tmp_path),
        'jobspooldirectory': str(tmp_path),
        'backupdir': str(tmp_path),
        'logfile': os.path.join(str(tmp_path), 'autorsyncbackup.log'),
        'speedlimitkb': 0,
        'dailyrotation': 8,
        'weeklyrotation': 5,
        'monthlyrotation': 13,
        'weeklybackup': 6,
        'monthlybackup': 1,
        'include': [],
        'exclude': [],
        'backupmailfrom': 'backup@%s' % socket.getfqdn(),
        'backupmailrecipients': [],
        'jobworkers': 3,
        'debuglevel': 0,
        'databaseretention': 540,
    }

    with open(attributes['mainconfigpath'], 'w') as f:
        for key in [
                'rsyncpath',
                'lockfile',
                'jobconfigdirectory',
                'jobspooldirectory',
                'backupdir',
                'logfile',
                'debuglevel',
        ]:
            f.write('%s: %s\n' % (key, attributes[key]))

    config().mainconfigpath = attributes['mainconfigpath']
    config().backupmailrecipients = []

    config().readConfig()

    assert config().mainconfigpath == attributes['mainconfigpath']
    assert config().rsyncpath == attributes['rsyncpath']
    assert config().lockfile == attributes['lockfile']
    assert config().jobconfigdirectory == attributes['jobconfigdirectory']
    assert config().jobspooldirectory == attributes['jobspooldirectory']
    assert config().backupdir == attributes['backupdir']
    assert config().logfile == attributes['logfile']
    assert config().speedlimitkb == attributes['speedlimitkb']
    assert config().dailyrotation == attributes['dailyrotation']
    assert config().weeklyrotation == attributes['weeklyrotation']
    assert config().monthlyrotation == attributes['monthlyrotation']
    assert config().weeklybackup == attributes['weeklybackup']
    assert config().monthlybackup == attributes['monthlybackup']
    assert config().include == attributes['include']
    assert config().exclude == attributes['exclude']
    assert config().backupmailfrom == attributes['backupmailfrom']
    assert config().backupmailrecipients == attributes['backupmailrecipients']
    assert config().jobworkers == attributes['jobworkers']
    assert config().debuglevel == attributes['debuglevel']
    assert config().databaseretention == attributes['databaseretention']
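
Only seven keys are actually written to main.yaml here; the remaining assertions (speedlimitkb, the rotation counts, include/exclude and so on) can only pass because readConfig falls back to built-in defaults for anything the file does not set, which is the real behavior this test exercises.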
Example #39
    parser.add_option("-c", "--main-config", dest="mainconfig", metavar="path_to_main.yaml",
        help="set different main config file, default value = /etc/autorsyncbackup/main.yaml", 
        default="/etc/autorsyncbackup/main.yaml")
    parser.add_option("-d", "--dry-run", action="store_true", dest="dryrun", default=False,
        help="do not invoke rsync, only perform a login attempt on remote host")
    parser.add_option("-v", "--verbose", action="store_true", dest="verbose", default=False,
        help="Write logoutput also to stdout")
    parser.add_option("-j", "--single-job", metavar="path_to_jobfile.job", dest="job", 
        help="run only the given job file")

    (options, args) = parser.parse_args()    
    return options

if __name__ == "__main__":
    options = setupCliArguments()
    config(options.mainconfig)
    
    # Welcome message
    if options.verbose:
        print("Starting AutoRsyncBackup")
    
    # Set logpath
    logger(config().logfile)
    logger().setDebuglevel(config().debuglevel)
    logger().setVerbose(options.verbose)
    for msg in config().debugmessages:
        logger().debug(msg)
    
    try:
        with Pidfile(config().lockfile, logger().debug, logger().error):
            # Run director
Example #40
def runBackup(jobpath, dryrun):
    """ Start backup run """
    exitFlag = threading.Event()
    queueLock = threading.Lock()
    workQueue = queue.Queue(0)

    try:
        with Pidfile(config().lockfile, logger().debug, logger().error):
            # Run director
            directorInstance = director()
            jobs = directorInstance.getJobArray(jobpath)

            # Start threads
            threads = []
            if not dryrun:
                for i in range(0, config().jobworkers):
                    thread = jobThread(i, exitFlag, queueLock, directorInstance, workQueue)
                    thread.start()
                    threads.append(thread)

            # Execute jobs
            queueLock.acquire()
            durationstats = {}
            durationstats['backupstartdatetime'] = int(time.time())
            for job in jobs:
                if(job.enabled):
                    if directorInstance.checkRemoteHost(job):
                        if not dryrun:
                            # Add to queue
                            workQueue.put(job)
                    else:
                        jobrunhistory().insertJob(job.backupstatus, None)
            queueLock.release()
            # Wait for queue to empty
            while not workQueue.empty():
                time.sleep(0.1)

            # Notify threads it's time to exit
            exitFlag.set()

            # Wait for all threads to complete
            for t in threads:
                t.join()
            durationstats['backupenddatetime'] = int(time.time())

            if not dryrun:
                # Do housekeeping
                durationstats['housekeepingstartdatetime'] = int(time.time())
                for job in jobs:
                    if(job.enabled):
                        if job.backupstatus['rsync_backup_status'] == 1:
                            directorInstance.backupRotate(job)
                jobrunhistory().deleteHistory()
                durationstats['housekeepingenddatetime'] = int(time.time())

                # Send status report
                statusemail().sendStatusEmail(jobs, durationstats)
#            else:
#                for job in jobs:
#                    job.showjob()
    except ProcessRunningException as m:
        statusemail().sendSuddenDeath(m)
        logger().error(m)
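
One subtlety in the shutdown sequence: the while not workQueue.empty() poll only waits until the last job has been taken off the queue, not until the worker finishes processing it; the exitFlag.set() plus t.join() pair covers that remaining half. A sketch of the alternative idiom, assuming jobThread were changed to call workQueue.task_done() after each job:

    workQueue.join()  # blocks until every queued job has been fully processed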
Example #41
    directorInstance = director()
    jobs = directorInstance.getJobArray(jobpath)
    return not directorInstance.checkRemoteHost(jobs[0])

def getLastBackupStatus(hostname):
    """ Get status of last backup run of given hostname and exit with exitcode 0 (success) or 1 (error) """
    return statuscli().printOutput(hostname)

if __name__ == "__main__":
    """ Start application """
    # Initialise variables
    checkSingleHost = False

    # Get CLI options and Config
    options = setupCliArguments()
    config(options.mainconfig)

    # Welcome message
    if options.verbose:
        print("Starting AutoRsyncBackup")

    # Only check if host is reachable, set appropriate settings
    if options.job and options.dryrun:
        checkSingleHost = True
        options.verbose = True
        config().debuglevel = 2

    # Set logpath
    logger(config().logfile)
    logger().setDebuglevel(config().debuglevel)
    logger().setVerbose(options.verbose)