Example 1
def main():
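    # Remove every item with buffer_status 88 from the buffer and mark it
    # 89; the usage threshold check below is commented out, so all matching
    # items are evicted regardless of buffer usage.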
    fh = FileHelper()
    dbh = DBHelper()

    sql_savedbuffer = "select * from ITEMS where buffer_status = 88 order by id"
    sql_updatebufferstatus = "UPDATE ITEMS SET BUFFER_STATUS = 89 WHERE ID = %s"
    usage = fh.bufferusage()
    print(usage)

    try:
        db = dbh.getDictCursor()
        cursor = db["cursor"]
        cursor.execute(sql_savedbuffer)
        result = cursor.fetchall()

        for file in result:
            # if usage <= 0.8:
            #    break
            fh.removefrombuffer(file["HASH"], file["BACKUPGROUP_ID"])
            usage = fh.bufferusage()
            cursor.execute(sql_updatebufferstatus, (file["ID"],))
            print("removed %s from buffer for BG %s " %
                  (file["HASH"], file["BACKUPGROUP_ID"]))
            print(usage)

    except Exception as e:
        print("Exception")  # sql error
        print(e)
        tb = e.__traceback__
        traceback.print_tb(tb)
Example 2
 def __init__(self, config, environ, logdispatcher, statechglogger):
     '''
     Constructor
     '''
     RuleKVEditor.__init__(self, config, environ, logdispatcher,
                           statechglogger)
     self.rulenumber = 169
     self.rulename = 'DisableAutoLogin'
     self.formatDetailedResults("initialize")
     self.applicable = {
         'type': 'white',
         'os': {
             'Mac OS X': ['10.15', 'r', '10.15.10']
         }
     }
     self.mandatory = True
     self.rootrequired = True
     self.files = {
         "kcpassword": {
             "path": "/etc/kcpassword",
             "remove": True,
             "content": None,
             "permissions": None,
             "owner": None,
             "group": None,
             "eventid": str(self.rulenumber).zfill(4) + "kcpassword"
         }
     }
     self.addKVEditor(
         "DisableAutoLogin", "defaults",
         "/Library/Preferences/com.apple.loginwindow", "", {
             "autoLoginUser": [
                 re.escape(
                     "The domain/default pair of (/Library/Preferences/com.apple.loginwindow, autoLoginUser) does not exist"
                 ), None
             ]
         }, "present", "",
         "This variable is to determine whether or not to " +
         "disable auto login", None, False, {})
     self.fh = FileHelper(self.logdispatch, self.statechglogger)
     self.ch = CommandHelper(self.logdispatch)
     for filelabel, fileinfo in sorted(self.files.items()):
         self.fh.addFile(filelabel, fileinfo["path"], fileinfo["remove"],
                         fileinfo["content"], fileinfo["permissions"],
                         fileinfo["owner"], fileinfo["group"],
                         fileinfo["eventid"])
     self.sethelptext()
Example 3
    def cleanupBuffer(self):
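        # Evict items that are already saved to both drives (DRIVE1_ID and
        # DRIVE2_ID set, buffer_status 1) and mark them 2, but only while
        # buffer usage is above the 80% threshold.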
        fh = FileHelper()
        dbh = DBHelper()
        logger = self.log

        sql_savedbuffer = "select * from ITEMS where (DRIVE1_ID > 0  and DRIVE2_ID > 0) and buffer_status = 1 order by id "
        sql_updatebufferstatus = "UPDATE ITEMS SET BUFFER_STATUS = 2 WHERE ID = %s"
        usage = fh.bufferusage()
        print(usage)

        try:
            db = dbh.getDictCursor()
            cursor = db["cursor"]
            cursor.execute(sql_savedbuffer)
            result = cursor.fetchall()

            for file in result:
                if usage <= 0.8:
                    break
                fh.removefrombuffer(file["HASH"], file["BACKUPGROUP_ID"])
                usage = fh.bufferusage()
                cursor.execute(sql_updatebufferstatus, (file["ID"],))
                print("removed %s from buffer for BG %s " %
                      (file["HASH"], file["BACKUPGROUP_ID"]))
                print(usage)
                logger.info({
                    'action': 'Removed from Buffer',
                    'hash': file["HASH"],
                    'backup_group': file["BACKUPGROUP_ID"],
                    "size": file["FILESIZE"]
                })

        except Exception as e:
            print("Exception")  # sql error
            print(e)
            tb = e.__traceback__
            traceback.print_tb(tb)
Example 4
class ScanFiles:
    backup_group = 1
    run_id = -1
    log_helper = LogHelper()
    log = log_helper.getLogger()
    db_helper = DBHelper()
    db_data = db_helper.getDictCursor()
    cursor = db_data["cursor"]
    file_helper = FileHelper()
    file_filter = []
    dir_filter = []

    def __init__(self, backup_group_id):
        self.backup_group = backup_group_id
        self.create_run()
        self.load_filters()

    def load_filters(self):
        cursor = self.cursor
        sql_loadfilefilter = 'Select expression from FILTERS ' \
                             'where (BACKUPGROUP_ID = %s OR BACKUPGROUP_ID is null) ' \
                             'and file = 1'
        sql_loaddirfilter = 'Select expression from FILTERS ' \
                            'where (BACKUPGROUP_ID = %s OR BACKUPGROUP_ID is null) ' \
                            'and dir = 1'
        try:
            cursor.execute(sql_loaddirfilter, (self.backup_group,))
            result = cursor.fetchall()
            self.dir_filter = self.compile_filters(result)

            cursor.execute(sql_loadfilefilter, (self.backup_group,))
            result = cursor.fetchall()
            self.file_filter = self.compile_filters(result)

        except Exception as e:
            print("Exception")  # sql error
            print(e)
            tb = e.__traceback__
            traceback.print_tb(tb)

    def compile_filters(self, result_set):
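        # Turn each stored filter expression into a regex: every '*' becomes
        # '(.*)' and the result is wrapped in a lookahead, so 'foo*' compiles
        # to '^(?=.*foo(.*)).*'.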
        result = []
        for data in result_set:
            raw_filter = '^(?=.*' + data["expression"].replace('*',
                                                               '(.*)') + ').*'
            print(raw_filter)
            filter = re.compile(raw_filter)
            result.append(filter)
        return result

    def check_filter(self, filters, path):
        for filter in filters:
            match = filter.match(path)
            if match:
                return True
        return False

    def create_run(self):
        cursor = self.cursor

        sql = "INSERT INTO RUNS (BACKUPGROUP_ID, TIME_STARTED) VALUES (%s, CURRENT_TIMESTAMP)"
        try:
            cursor.execute(sql, (self.backup_group,))
            self.run_id = cursor.lastrowid

            self.log.info({
                'action': 'Create Run_ID',
                'run_id': self.run_id,
                'backup_group': self.backup_group
            })
        except Exception as e:
            print("Exception")  # sql error
            print(e)
            tb = e.__traceback__
            traceback.print_tb(tb)

    def scan_for_files(self):
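        # Walk every base directory of the backup group, apply the file and
        # dir filters, record each file in FILES/BACKUPITEMS, then hash and
        # buffer new or changed files. The DB connection is recycled every
        # 1000 files per directory.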
        cursor = self.cursor

        sql_insert_file = 'INSERT IGNORE INTO FILES (backupgroup_id, path, path_hash) ' \
                          'VALUES (%s, %s, md5(concat(%s, "-", %s)))'
        sql_insert_bu = """
        INSERT INTO BACKUPITEMS (RUN_ID, FILE_ID, FILESIZE, LASTMODIFIED, BACKUPGROUP_ID)
        Select %s, id, %s, %s, %s
        from FILES where path_hash = md5(concat(%s, '-', %s))
        """

        dirs = self.get_basedirs(cursor)

        # ---------------- Scan Dirs
        totalfiles = 0
        for dir in dirs:
            filesperdir = 0
            filtered_files = 0
            started = int(round(time.time() * 1000))
            self.log.info({
                'action': 'Start scanning Dir',
                'run_id': self.run_id,
                'backup_group': self.backup_group,
                'dir': dir['PATH']
            })
            for root, subdirs, files in os.walk(dir['PATH']):
                for file in files:
                    filesperdir += 1
                    file_hash = ""

                    if filesperdir % 1000 == 0:
                        cursor = self.new_connection()

                    try:
                        filedata = {}
                        filedata['filepath'] = os.path.join(root, file)
                        filedata['mtime'] = int(
                            round(
                                os.path.getmtime(filedata['filepath']) * 1000))
                        filedata['size'] = os.stat(
                            filedata['filepath']).st_size

                        # file filter
                        filename = self.file_helper.get_filename(
                            filedata['filepath'])
                        if self.check_filter(self.file_filter, filename):
                            print("Filtered (file) out " +
                                  filedata['filepath'] + ' (' + filename + ')')
                            filtered_files += 1
                            continue

                        # dir filter
                        parent = self.file_helper.get_parent(
                            filedata['filepath'])
                        if self.check_filter(self.dir_filter, parent):
                            print("Filtered (dir) out " +
                                  filedata['filepath'] + ' (' + parent + ')')
                            filtered_files += 1
                            continue

                        totalfiles += 1
                        with warnings.catch_warnings():
                            warnings.simplefilter("ignore")
                            cursor.execute(
                                sql_insert_file,
                                (self.backup_group, filedata['filepath'],
                                 self.backup_group, filedata['filepath']))
                        cursor.execute(
                            sql_insert_bu,
                            (self.run_id, filedata['size'], filedata['mtime'],
                             self.backup_group, self.backup_group,
                             filedata['filepath']))

                        new_id = cursor.lastrowid

                        affected_rows, file_hash = self.map_unchanged(
                            cursor, filedata, new_id)

                        if affected_rows > 0:
                            self.log.debug({
                                'action': 'Unchanged File',
                                'path': filedata['filepath'],
                                'run_id': self.run_id,
                                'backup_group': self.backup_group,
                                'count': affected_rows
                            })
                        else:
                            file_hash = self.hash_match_or_create_item(
                                cursor, filedata, new_id)

                        if file_hash is not None:

                            buffer_status = self.check_buffer_status(
                                cursor, file_hash)

                            if buffer_status <= 0:

                                self.buffer_file(cursor, filedata, file_hash,
                                                 new_id)
                            else:
                                self.log.debug({
                                    'action': 'File already Buffered',
                                    'path': filedata['filepath'],
                                    'run_id': self.run_id,
                                    'backup_group': self.backup_group,
                                    'hash': file_hash,
                                    'backup item': new_id
                                })

                    except Exception as e:
                        cursor = self.new_connection()
                        print("Exception")  # sql error
                        print(e)
                        tb = e.__traceback__
                        traceback.print_tb(tb)

                    if totalfiles > 0 and totalfiles % 10000 == 0:
                        print("%s Files Scanned. Last Scanned: %s" %
                              (totalfiles, filedata))

                    # print(filedata)
            finished = int(round(time.time() * 1000))
            duration = finished - started
            divider = 1
            if filesperdir > 0:
                divider = filesperdir
            per_file = duration / divider
            self.log.info({
                'action': 'End scanning Dir',
                'run_id': self.run_id,
                'backup_group': self.backup_group,
                'dir': dir['PATH'],
                'count': filesperdir,
                'duration': duration,
                'per_file': per_file,
                'filtered': filtered_files
            })
            cursor = self.new_connection()

        self.log.info({
            'action': 'End scanning Dirs',
            'run_id': self.run_id,
            'backup_group': self.backup_group,
            'count': totalfiles
        })

        # ------------------ SET Hashing Complete
        cursor = self.new_connection()
        sql_sethashingsuccess = 'UPDATE RUNS SET SUCESSFUL = 1 WHERE ID = %s'

        try:
            cursor.execute(sql_sethashingsuccess, (self.run_id,))
            self.log.info({
                'action': 'Scanning and Hashing successful',
                'run_id': self.run_id,
                'backup_group': self.backup_group
            })

        except Exception as e:
            print("Exception")  # sql error
            print(e)
            tb = e.__traceback__
            traceback.print_tb(tb)

    def buffer_file(self, cursor, filedata, new_hash, new_id):
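        # Copy a file into the buffer under its hash-derived path and re-hash
        # the copy to verify it. If the hashes diverge, the source is hashed
        # again to distinguish a corrupt copy from a file that changed during
        # the copy, and the outcome is recorded via ITEMS.BUFFER_STATUS.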
        sql_update_buffer_status = "Update ITEMS Set BUFFER_STATUS=%s where hash = %s and backupgroup_id = %s"
        sql_check_hash_exists = "select count(*) as count, max(id) as item_id from ITEMS where hash = %s and backupgroup_id = %s"
        sql_updatebuitem = 'update BACKUPITEMS  set item_id  = %s, hash = %s where id = %s '
        # Build Target Path
        bufferpath = self.file_helper.buffer_path_from_hash(
            new_hash, self.backup_group)
        self.file_helper.create_parent_if_not_exist(bufferpath)
        # Copy File
        self.file_helper.copy_file(filedata['filepath'], bufferpath)
        # Validate Hash
        tgt_hash = self.file_helper.hash_file(bufferpath)
        if tgt_hash == new_hash:
            # Set Bufferstatus to 1
            cursor.execute(sql_update_buffer_status,
                           (1, new_hash, self.backup_group))
            self.log.info({
                'action': 'File Buffered Successfully',
                'path': filedata['filepath'],
                'run_id': self.run_id,
                'backup_group': self.backup_group,
                'hash': new_hash,
                'backup item': new_id
            })

        else:
            # hash original again
            src_hash = self.file_helper.hash_file(filedata['filepath'])

            if src_hash != tgt_hash:
                # delete target and  set buffer code to -1
                self.file_helper.delete_file(bufferpath)
                cursor.execute(sql_update_buffer_status,
                               (-1, new_hash, self.backup_group))
                self.log.info({
                    'action': 'Could not Buffer: Fast Changing',
                    'path': filedata['filepath'],
                    'run_id': self.run_id,
                    'backup_group': self.backup_group,
                    'hash': new_hash,
                    'backup item': new_id
                })
            else:
                # Check if entry for new Hash exists
                cursor.execute(sql_check_hash_exists,
                               (tgt_hash, self.backup_group))
                rs2 = cursor.fetchone()
                if rs2["count"] == 0:
                    # set orig Item Entry to -2
                    cursor.execute(sql_update_buffer_status,
                                   (-2, new_hash, self.backup_group))
                    # create items entry
                    sql_insertitems = "Insert into ITEMS(backupgroup_id, hash, filesize) VALUES (%s, %s, %s)"
                    cursor.execute(sql_insertitems,
                                   (self.backup_group, tgt_hash,
                                    os.stat(bufferpath).st_size))
                    # move file
                    tgtpath2 = self.file_helper.buffer_path_from_hash(
                        tgt_hash, self.backup_group)
                    self.file_helper.create_parent_if_not_exist(tgtpath2)
                    self.file_helper.move_file(bufferpath, tgtpath2)
                    moved_hash = self.file_helper.hash_file(tgtpath2)
                    if tgt_hash == moved_hash:
                        # update BUI with new item and set buffer_status = 1
                        cursor.execute(sql_updatebuitem,
                                       (rs2["item_id"], tgt_hash, new_id))
                        cursor.execute(sql_update_buffer_status,
                                       (1, tgt_hash, self.backup_group))
                        self.log.info({
                            'action':
                            'File Buffered Successfully but in Changed Version',
                            'path': filedata['filepath'],
                            'run_id': self.run_id,
                            'backup_group': self.backup_group,
                            'hash': tgt_hash,
                            'old hash': new_hash,
                            'backup item': new_id
                        })
                    else:
                        # Delete file and update  item bufferstatus -4
                        self.file_helper.delete_file(tgtpath2)
                        cursor.execute(sql_update_buffer_status,
                                       (-4, new_hash, self.backup_group))
                        self.log.info({
                            'action':
                            'Could not Buffer: Changed and Fast Changing',
                            'path': filedata['filepath'],
                            'run_id': self.run_id,
                            'backup_group': self.backup_group,
                            'hash': new_hash,
                            'backup item': new_id
                        })
                else:
                    buffer_status = self.check_buffer_status(cursor, tgt_hash)
                    if buffer_status > 0:
                        # delete target and change bui entry
                        self.file_helper.delete_file(bufferpath)
                        cursor.execute(sql_updatebuitem,
                                       (rs2["item_id"], tgt_hash, new_id))
                        cursor.execute(sql_update_buffer_status,
                                       (1, tgt_hash, self.backup_group))
                        self.log.info({
                            'action':
                            'File Buffered Successfully Changed Version already in Buffer',
                            'path': filedata['filepath'],
                            'run_id': self.run_id,
                            'backup_group': self.backup_group,
                            'hash': tgt_hash,
                            'old hash': new_hash,
                            'backup item': new_id
                        })
                    else:
                        # move target
                        tgtpath2 = self.file_helper.buffer_path_from_hash(
                            tgt_hash, self.backup_group)
                        self.file_helper.create_parent_if_not_exist(tgtpath2)
                        self.file_helper.move_file(bufferpath, tgtpath2)
                        moved_hash = self.file_helper.hash_file(tgtpath2)
                        # validate new target
                        if tgt_hash == moved_hash:
                            cursor.execute(sql_updatebuitem,
                                           (rs2["item_id"], tgt_hash, new_id))
                            self.log.info({
                                'action':
                                'File Buffered Successfully Changed Version in existing Item',
                                'path': filedata['filepath'],
                                'run_id': self.run_id,
                                'backup_group': self.backup_group,
                                'hash': tgt_hash,
                                'old hash': new_hash,
                                'backup item': new_id
                            })
                        else:
                            # Delete target and set buffer status -3
                            self.file_helper.delete_file(tgtpath2)
                            cursor.execute(sql_update_buffer_status,
                                           (-3, new_hash, self.backup_group))
                            self.log.info({
                                'action':
                                'Could not Buffer: Fast Changing in existing item',
                                'path': filedata['filepath'],
                                'run_id': self.run_id,
                                'backup_group': self.backup_group,
                                'hash': new_hash,
                                'backup item': new_id
                            })

    def check_buffer_status(self, cursor, new_hash):
        sql_check_buffer_status = "SELECT BUFFER_STATUS FROM ITEMS I where hash = %s and backupgroup_id = %s"
        # print('[%s | %s]' % (new_hash, self.backup_group))
        cursor.execute(sql_check_buffer_status, (new_hash, self.backup_group))
        rs = cursor.fetchone()
        buffer_status = rs["BUFFER_STATUS"]
        return buffer_status

    def hash_match_or_create_item(self, cursor, filedata, new_id):
        sql_insertitems = "Insert into ITEMS(backupgroup_id, hash, filesize) VALUES (%s, %s, %s)"
        # set hash and create item where necessary
        sql_sethash = 'UPDATE BACKUPITEMS SET HASH = %s WHERE id = %s'
        new_hash = self.file_helper.hash_file(filedata['filepath'])
        if new_hash is None:
            self.log.warn({
                'action': 'Could not hash',
                'path': filedata['filepath'],
                'run_id': self.run_id,
                'backup_group': self.backup_group,
            })
            return new_hash
        cursor.execute(sql_sethash, (new_hash, new_id))
        sql_matchwithitems = """
                                     UPDATE BACKUPITEMS t
                                     inner join BACKUPITEMS b
                                     on t.id = b.id
                                     inner join ITEMS i
                                     on i.hash = b.hash
                                     SET b.ITEM_ID = i.id
                                     where b.id = %s and i.backupgroup_id = %s
                                 """
        matched = cursor.execute(sql_matchwithitems,
                                 (new_id, self.backup_group))
        if matched == 0:

            inserted = cursor.execute(
                sql_insertitems,
                (self.backup_group, new_hash, filedata['size']))
            matched = cursor.execute(sql_matchwithitems,
                                     (new_id, self.backup_group))
        else:
            self.log.info({
                'action': 'File Unchanged',
                'path': filedata['filepath'],
                'run_id': self.run_id,
                'backup_group': self.backup_group,
                'count': matched,
                'hash': new_hash
            })
        return new_hash

    def map_unchanged(self, cursor, filedata, new_id):
        # check if the file is unchanged
        sql_updateunchanged = """
                                           Update BACKUPITEMS t
                                           inner join
                                           BACKUPITEMS as n
                                           on  t.id = n.id
                                           inner join BACKUPITEMS as c
                                           on c.file_id = n.file_id and c.FILESIZE = n.FILESIZE
                                           and c.lastmodified = n.lastmodified
                                           inner join (select max(id) as id from BACKUPITEMS
                                           where file_id =
                                              (Select id from FILES where path_hash = md5(concat(%s, '-', %s)))
                                           and hash is not null) x
                                           on c.id = x.id
                                           SET t.item_id = c.item_id, t.hash=c.hash
                                           where n.id = %s
                                       """
        sql_gethash = "select hash from BACKUPITEMS as b where b.id = %s"
        affected_rows = cursor.execute(
            sql_updateunchanged,
            (self.backup_group, filedata['filepath'], new_id))
        mapped_hash = None
        if affected_rows > 0:
            cursor.execute(sql_gethash, (new_id,))
            rs = cursor.fetchone()
            mapped_hash = rs["hash"]
        return affected_rows, mapped_hash

    def get_basedirs(self, cursor):
        sql_dirs = 'Select PATH from DIRECTORY where BACKUPGROUP_ID = %s'
        # ---------------- Get Relevant Base Dirs
        dirs = []
        try:
            cursor.execute(sql_dirs, (self.backup_group,))
            dirs = cursor.fetchall()
        except Exception as e:
            print("Exception")  # sql error
            print(e)
            tb = e.__traceback__
            traceback.print_tb(tb)
        return dirs

    def new_connection(self):
        self.db_helper.close(self.db_data)
        self.db_data = self.db_helper.getDictCursor()
        self.cursor = self.db_data["cursor"]
        return self.cursor
Example 5
def main():
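    # Restore the files of one run: the BMU_PATH_* environment variables
    # select the run and rewrite the stored paths, and every target that
    # does not exist yet is copied back from its backup drive. When a source
    # file is missing, the operator is prompted before the check is retried.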
    # get db cursor
    db_helper = DBHelper()
    file_helper = FileHelper()
    log_helper = LogHelper()
    log = log_helper.getLogger()
    db_data = db_helper.getDictCursor()
    cursor = db_data["cursor"]

    log.info({
        'action': 'Restore started',
        'BMU_PATH_SEARCH': os.getenv('BMU_PATH_SEARCH'),
        'BMU_PATH_REPLACE': os.getenv('BMU_PATH_REPLACE'),
        'BMU_PATH_RUNID': os.getenv('BMU_PATH_RUNID'),
        'BMU_PATH_DELIM': os.getenv('BMU_PATH_DELIM'),
        'BMU_PATH_DEPTH': os.getenv('BMU_PATH_DEPTH'),
        'BMU_PATH_SELECT': os.getenv('BMU_PATH_SELECT')
    })

    sql = """
        select REPLACE(PATH, '%s', '%s') AS PATH, d.NAME as DRIVE, FILESIZE, i.HASH from BACKUPITEMS b
        inner join ITEMS i
        on b.item_id = i.id
        inner join DRIVES d
        on COALESCE(DRIVE1_ID, DRIVE2_ID) = d.ID
        where b.run_id = %s
        and SUBSTRING_INDEX(path,'%s',%s) = '%s'
        order by COALESCE(DRIVE1_ID, DRIVE2_ID) asc, filesize desc
    """ % (os.getenv('BMU_PATH_SEARCH'), os.getenv('BMU_PATH_REPLACE'),
           os.getenv('BMU_PATH_RUNID'), os.getenv('BMU_PATH_DELIM'),
           os.getenv('BMU_PATH_DEPTH'), os.getenv('BMU_PATH_SELECT'))
    print(sql)
    cursor.execute(sql)
    files_to_restore = cursor.fetchall()

    count = 0
    errors = ""
    error_list = []
    for file_to_restore in files_to_restore:
        # print(file_to_restore)
        unescaped_path = file_to_restore['PATH'].replace('\\\\', '\\')
        # dirty hack: adds second backslash if path starts with backslash
        if unescaped_path.startswith('\\'):
            unescaped_path = '\\' + unescaped_path
        file_to_restore['PATH'] = unescaped_path
        tgt = file_to_restore['PATH']
        src = file_helper.path_from_hash(os.getenv('BMU_INT_ROOT'),
                                         file_to_restore['DRIVE'],
                                         file_to_restore['HASH'])
        if not file_helper.file_exists(tgt):
            while not file_helper.file_exists(src):
                print("Missing: " + src)
                input("Press Enter to continue...")
            if file_helper.file_exists(src):
                try:
                    file_helper.create_parent_if_not_exist(tgt)
                    file_helper.copy_file(src, tgt)
                except Exception as e:
                    print("Exception")  # sql error
                    print(e)
                    tb = e.__traceback__
                    traceback.print_tb(tb)
                    errors += "Could not Copy " + src + " to " + tgt + ": " + str(
                        e)
                    error_list.append({
                        "source": src,
                        "target": tgt,
                        "exception": str(e)
                    })
                    continue  # skip the success message below on failure
                count += 1
                print(tgt + " sucessfully restored [" + str(count) + "]")
        else:
            print(tgt + "allready exists, skipping")
        if count > 0 and count % 1000 == 0:
            log.info({
                'action': 'Restore progress',
                'BMU_PATH_SELECT': os.getenv('BMU_PATH_SELECT'),
                'BMU_PATH_RUNID': os.getenv('BMU_PATH_RUNID'),
                'count': count,
                'total': len(files_to_restore)
            })

    log.info({
        'action': 'Restore finished',
        'BMU_PATH_SEARCH': os.getenv('BMU_PATH_SEARCH'),
        'BMU_PATH_REPLACE': os.getenv('BMU_PATH_REPLACE'),
        'BMU_PATH_RUNID': os.getenv('BMU_PATH_RUNID'),
        'BMU_PATH_DELIM': os.getenv('BMU_PATH_DELIM'),
        'BMU_PATH_DEPTH': os.getenv('BMU_PATH_DEPTH'),
        'BMU_PATH_SELECT': os.getenv('BMU_PATH_SELECT'),
        'count': count,
        'errors': error_list
    })
Example 6
def main():
    fh = FileHelper()
    src = 'D:\\backup\\test\\dir1234\\test.txt'

    print('------')
    print("get parent test")
    parent = fh.get_parent(src)
    print(parent)
    print()

    print('------')
    print("get filename test")
    basename = fh.get_filename(src)
    print(basename)
    print()

    print('------')
    print("hash test")
    fhash = fh.hash_file(src)
    print(fhash)
    print()

    print('------')
    print("path from hash test")
    tgt = fh.path_from_hash('D:\\backup\\test', 'TST0001', fhash)
    print(tgt)
    print()

    print('------')
    print("move test")
    fh.move_file(src, tgt)
    if os.path.isfile(tgt):
        print("OK: Target Exists")
    else:
        print("Error: Target does not exist")
    if os.path.isfile(src):
        print("Error: Source Exists")
    else:
        print("OK: Source does not exist")
    print()

    print('------')
    print("copy test")
    fh.copy_file(tgt, src)
    if os.path.isfile(tgt):
        print("OK: Target Exists")
    else:
        print("Error: Target does not exist")
    if os.path.isfile(src):
        print("OK: Source Exists")
    else:
        print("Error: Source does not exist")
    print()

    print('------')
    print("delete test")
    fh.delete_file(tgt)
    if os.path.isfile(tgt):
        print("Error: Target Exists")
    else:
        print("OK: Target does not exist")
    if os.path.isfile(src):
        print("OK: Source Exists")
    else:
        print("Error: Source does not exist")
    print()

    print('------')
    print("create parent test")
    basedir = 'D:\\backup\\test\\'
    dir1 = basedir + ''.join(
        random.choices(string.ascii_uppercase + string.digits, k=5))
    dir2 = dir1 + '\\' + ''.join(
        random.choices(string.ascii_uppercase + string.digits, k=5))
    filename = dir2 + "\\" + "test.txt"
    print(dir1)
    print(dir2)
    print(filename)
    fh.create_parent_if_not_exist(filename)
    if os.path.isdir(dir1):
        print("OK: Dir1 Exists")
    else:
        print("Error: Dir1 does not exist")
    if os.path.isdir(dir2):
        print("OK: Dir2 Exists")
    else:
        print("Error: Dir2 does not exist")
Example 7
def main():
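    # Archive runs older than 60 days: dump each run together with its
    # BACKUPITEMS rows to a .bmu file, then delete those BACKUPITEMS from
    # the database.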
    fh = FileHelper()
    dbh = DBHelper()
    sql_runs = "Select * from RUNS " \
               "where id in (select distinct run_id from BACKUPITEMS) " \
               "and TIME_STARTED < DATE_SUB(NOW(), INTERVAL 60 DAY)"

    runs = []
    target_folder = 'b:/current/archive/bmu/'
    try:
        db = dbh.getDictCursor()
        cursor = db["cursor"]
        cursor.execute(sql_runs)
        result = cursor.fetchall()
        for r in result:
            print(r)
            runs.append(r["ID"])

    except Exception as e:
        print("Exception")  # sql error
        print(e)
        tb = e.__traceback__
        traceback.print_tb(tb)

    for run_id in runs:
        print("Run: %s" % run_id)

        target_file = "%s%s.bmu" % (target_folder, run_id)

        sql_run = "select * from RUNS where id = %s"
        sql_bui = "SELECT B.*, F.path FROM BACKUPITEMS B inner join FILES F ON B.file_id = F.id where run_id = %s"

        sql_DELETE = "DELETE FROM BACKUPITEMS WHERE run_id = %s"

        rundata = {}

        try:
            cursor.execute(sql_run, (run_id,))
            result = cursor.fetchone()
            rundata["run"] = result

            cursor.execute(sql_bui, (run_id,))
            result = cursor.fetchall()

            buis = list(result)

            # print(buis)
            rundata["backupitems"] = buis

            # print(rundata)
            fh.save_dict_to_file(rundata, target_file)

            cursor.execute(sql_DELETE, (run_id,))

        except Exception as e:
            print("Exception")  # sql error
            print(e)
            tb = e.__traceback__
            traceback.print_tb(tb)
Example 8
    def __init__(self, config, environ, logdispatcher, statechglogger):
        """

        @param config:
        @param environ:
        @param logdispatcher:
        @param statechglogger:
        """

        Rule.__init__(self, config, environ, logdispatcher,
                      statechglogger)
        self.rulenumber = 255
        self.rulename = 'ConfigureKerberos'
        self.formatDetailedResults("initialize")
        self.mandatory = True
        self.sethelptext()
        self.rootrequired = True
        self.guidance = []
        self.applicable = {'type': 'white', 'family': 'linux',
                           'os': {'Mac OS X': ['10.15', 'r', '10.15.10']}}
        # This if/else statement fixes a bug in Configure Kerberos that
        # occurs on Debian systems due to the fact that Debian has no wheel
        # group by default.
        if self.environ.getosfamily() == 'darwin':
            self.files = {"krb5.conf":
                          {"path": "/etc/krb5.conf",
                           "remove": False,
                           "content": MACKRB5,
                           "permissions": 0o644,
                           "owner": os.getuid(),
                           "group": "wheel",
                           "eventid": str(self.rulenumber).zfill(4) + "krb5"},
                          "edu.mit.Kerberos":
                          {"path": "/Library/Preferences/edu.mit.Kerberos",
                           "remove": True,
                           "content": None,
                           "permissions": None,
                           "owner": None,
                           "group": None,
                           "eventid": str(self.rulenumber).zfill(4) +
                           "Kerberos"},
                          "edu.mit.Kerberos.krb5kdc.launchd":
                          {"path": "/Library/Preferences/edu.mit.Kerberos.krb5kdc.launchd",
                           "remove": True,
                           "content": None,
                           "permissions": None,
                           "owner": None,
                           "group": None,
                           "eventid": str(self.rulenumber).zfill(4) +
                           "krb5kdc"},
                          "kerb5.conf":
                          {"path": "/etc/kerb5.conf",
                           "remove": True,
                           "content": None,
                           "permissions": None,
                           "owner": None,
                           "group": None,
                           "eventid": str(self.rulenumber).zfill(4) + "kerb5"},
                          "edu.mit.Kerberos.kadmind.launchd":
                          {"path": "/Library/Preferences/edu.mit.Kerberos.kadmind.launchd",
                           "remove": True,
                           "content": None,
                           "permissions": None,
                           "owner": None,
                           "group": None,
                           "eventid": str(self.rulenumber).zfill(4) +
                           "kadmind"},
                          }
        else:
            self.files = {"krb5.conf":
                          {"path": "/etc/krb5.conf",
                           "remove": False,
                           "content": LINUXKRB5,
                           "permissions": 0o644,
                           "owner": "root",
                           "group": "root",
                           "eventid": str(self.rulenumber).zfill(4) + "krb5"}}
        self.ch = CommandHelper(self.logdispatch)
        self.fh = FileHelper(self.logdispatch, self.statechglogger)
        if self.environ.getosfamily() == 'linux':
            self.ph = Pkghelper(self.logdispatch, self.environ)
        self.filepathToConfigure = []
        for filelabel, fileinfo in sorted(self.files.items()):
            if fileinfo["remove"]:
                msg = "Remove if present " + str(fileinfo["path"])
            else:
                msg = "Add or update if needed " + str(fileinfo["path"])
            self.filepathToConfigure.append(msg)
            self.fh.addFile(filelabel,
                            fileinfo["path"],
                            fileinfo["remove"],
                            fileinfo["content"],
                            fileinfo["permissions"],
                            fileinfo["owner"],
                            fileinfo["group"],
                            fileinfo["eventid"]
                            )
        # Configuration item instantiation
        datatype = "bool"
        key = "CONFIGUREFILES"
        instructions = "When Enabled will fix these files: " + \
            str(self.filepathToConfigure)
        default = True
        self.ci = self.initCi(datatype, key, instructions, default)
Example 9
            statemachine.on_event({'event': 'msg_sent', 'msg': sendmsg})
            return (client, sendmsg)
        segnum = int(
            msg[1:2])  # struct of our segment msg = s<segnum>:<payload>
        payload = msg[3:]
        filehelper.writetofile(payload)
        sendmsg = "ACK:s" + segnum
        sock.sendto(sendmsg.encode(), client)
        statemachine.on_event({'event': 'msg_sent', 'msg': sendmsg})
        return (client, sendmsg)


# TODO: add a state where if an error happens that the state machine
#       can't handle, the error state will log that error

filehelper = FileHelper()

server_socket = socket(AF_INET, SOCK_DGRAM)
server_socket.bind(server_addr)
server_socket.setblocking(False)

read_set = set([server_socket])
write_set = set()
error_set = set([server_socket])

statemachine = ServerStateMachine()
statehandler = {}
statehandler['IdleState'] = idle_handler
statehandler['WaitState'] = wait_handler
statehandler['GetState'] = get_handler
statehandler['PutState'] = put_handler
Example 10
 def save(self):
     FileHelper(self.high_scores_file).save(HighScores.high_scores)
Example 11
 def load(self):
     HighScores.high_scores = FileHelper(self.high_scores_file).load()
     if HighScores.high_scores is None:
         keys = [i + 1 for i in range(10)]
         values = [("AAA", 100 * (i + 1)) for i in range(10, 0, -1)]
         # zip replaces Python 2's itertools.izip for Python 3 compatibility
         HighScores.high_scores = dict(zip(keys, values))
Example 12
class BackupFiles:
    drivepathinternal = os.getenv('BMU_INT_ROOT')
    drivepathexternal = os.getenv('BMU_EXT_ROOT')
    log_helper = LogHelper()
    log = log_helper.getLogger()
    db_helper = DBHelper()
    db_data = db_helper.getDictCursor()
    cursor = db_data["cursor"]
    file_helper = FileHelper()

    def __init__(self):
        pass

    def backup_files(self, backupgroup_id, external):
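        # Copy every buffered item of the backup group to the internal or
        # external drive, verify each copy by hash, and record the result in
        # DRIVE1_ID/DRIVE2_ID. Returns the drive id when the drive ran out
        # of space (so the caller can mark it full), otherwise 0.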
        logger = self.log
        filehelper = self.file_helper
        if external:
            drivepath = self.drivepathexternal
        else:
            drivepath = self.drivepathinternal

        drive_info = self.get_drive(backupgroup_id, external)

        logger.info({
            'action': 'Starting Backup of Files',
            'backup_group': backupgroup_id,
            'external': external,
            'Drive Info': drive_info
        })

        free_disk_space, free_quota = self.get_free_space(
            drive_info, drivepath)
        logger.info({
            'action': 'Free Space',
            'backup_group': backupgroup_id,
            'external': external,
            'Drive Info': drive_info,
            'free_quota': free_quota,
            'free_space': free_disk_space
        })

        if free_disk_space <= 0 or free_quota <= 0:
            logger.warn({
                'action': 'Disk Full, Aborting',
                'backup_group': backupgroup_id,
                'external': external,
                'Drive Info': drive_info,
                'free_quota': free_quota,
                'free_space': free_disk_space
            })
            return drive_info["id"]
        files_to_save = self.get_filestosave(backupgroup_id, external)
        total_files = len(files_to_save)
        files_saved = 0
        logger.info({
            'action': 'Files To backup',
            'backup_group': backupgroup_id,
            'external': external,
            'files_to_backup': total_files
        })
        skip_big = 0
        for file_to_save in files_to_save:
            # # temporary code for testing
            #
            # if file_to_save["filesize"] > 5000000000:
            #    logger.info("Skipping File to big because of temporary file Size limit 5GB : %s" % file_to_save)
            #    continue
            # # End of Temporary Code
            if free_disk_space < file_to_save[
                    "filesize"] or free_quota < file_to_save["filesize"]:
                logger.info({
                    'action': 'Skipping File too big for remaining Space',
                    'backup_group': backupgroup_id,
                    'external': external,
                    'file_to_backup': file_to_save
                })
                skip_big += 1
                continue
            target = filehelper.path_from_hash(drivepath, drive_info["name"],
                                               file_to_save["hash"])
            source = filehelper.buffer_path_from_hash(file_to_save["hash"],
                                                      backupgroup_id)

            logger.info({
                'action': 'Copying File',
                'backup_group': backupgroup_id,
                'external': external,
                'file_to_backup': file_to_save
            })
            if not filehelper.copy_file(source, target):
                logger.error({
                    'action': 'Copying File',
                    'backup_group': backupgroup_id,
                    'external': external,
                    'file_to_backup': file_to_save,
                    'source': source,
                    'target': target
                })
                self.mark_item(backupgroup_id, file_to_save["hash"], external,
                               -9)
                continue
            hash_tgt = filehelper.hash_file(target)
            if hash_tgt != file_to_save["hash"]:
                logger.error({
                    'action': 'Hash not Matching',
                    'backup_group': backupgroup_id,
                    'external': external,
                    'file_to_backup': file_to_save,
                    'hash_target': hash_tgt,
                    'target': target
                })
                hash_src_new = filehelper.hash_file(source)
                if file_to_save["hash"] == hash_src_new:
                    filehelper.delete_file(target)
                    self.mark_item(backupgroup_id, file_to_save["hash"],
                                   external, -1)
                    logger.error(
                        "File changed during copying from buffer %s : %s != %s"
                        % (target, hash_tgt, hash_src_new))
                    logger.error({
                        'action': 'File changed during copying from buffer',
                        'backup_group': backupgroup_id,
                        'external': external,
                        'file_to_backup': file_to_save,
                        'hash_target': hash_tgt,
                        'target': target,
                        'hash_src_new': hash_src_new
                    })
                    continue
                else:
                    filehelper.delete_file(target)
                    self.mark_item(backupgroup_id, file_to_save["hash"],
                                   external, -2)
                    logger.error({
                        'action':
                        'Buffered File does not produce correct hash',
                        'backup_group': backupgroup_id,
                        'external': external,
                        'file_to_backup': file_to_save,
                        'hash_target': hash_tgt,
                        'target': target,
                        'hash_src_new': hash_src_new
                    })
                    continue
            else:
                self.mark_item(backupgroup_id, file_to_save["hash"], external,
                               drive_info["id"])
                logger.info({
                    'action': 'Backup File Successful',
                    'backup_group': backupgroup_id,
                    'external': external,
                    'file_to_backup': file_to_save,
                    'hash_target': hash_tgt,
                    'target': target
                })
                files_saved += 1

            free_quota = free_quota - file_to_save["filesize"]
            free_disk_space = filehelper.freespace(drivepath)
            logger.info({
                'action': 'Remaining Free Space',
                'backup_group': backupgroup_id,
                'external': external,
                'Drive Info': drive_info,
                'free_quota': free_quota,
                'free_space': free_disk_space
            })
        logger.info({
            'action': 'Finished Backup',
            'backup_group': backupgroup_id,
            'external': external,
            'Drive Info': drive_info,
            'free_quota': free_quota,
            'free_space': free_disk_space,
            'Files_To_Save': total_files,
            'Files_Saved': files_saved
        })
        if skip_big > 0:
            return drive_info["id"]
        else:
            return 0

    def get_filestosave(self, backupgroup_id: int, external: bool):
        cursor = self.cursor
        tracking_field = 'DRIVE1_ID'
        if external:
            tracking_field = 'DRIVE2_ID'
        sql_getfilesforrun = """
        Select i.id as item_id, i.hash as hash,
            i.filesize as filesize,
            i.drive1_id as drive1_id, i.drive2_id as drive2_id, i.buffer_status
            from ITEMS i
            where (i.%s is null or i.%s = 0)
            and i.buffer_status = 1
            and i.backupgroup_id = %s
            order by filesize desc
        """ % (tracking_field, tracking_field, backupgroup_id)

        # print(sql_getfilesforrun)

        try:
            cursor.execute(sql_getfilesforrun)
            files = cursor.fetchall()
            return files

        except Exception as e:
            print("Exception")  # sql error
            print(e)
            tb = e.__traceback__
            traceback.print_tb(tb)

    def get_drive(self, backupgroup_id, external):
        cursor = self.cursor
        sql_getdrive = """SELECT id, name, drivefull, extern, maxsize, drive_id, group_id FROM DRIVES d
            inner join DRIVES_GROUPS dg
            on d.id = dg.drive_id
            where group_id = %s and drivefull = false and extern = %s limit 1
        """ % (backupgroup_id, external)

        try:
            cursor.execute(sql_getdrive)
            result = cursor.fetchone()

            return result

        except Exception as e:
            print("Exception")  # sql error
            print(e)
            tb = e.__traceback__
            traceback.print_tb(tb)
            return {}

    def get_free_space(self, drive_info: dict, drivepath: str):
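        # Return (free disk space, free quota): the quota is the drive's
        # maxsize minus the deduplicated size of the items already stored
        # on it.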
        filehelper = self.file_helper
        cursor = self.cursor
        disk = filehelper.freespace(drivepath)
        sql_getusedspace = """
        select sum(size) size from (
        select max(filesize) as size, i.hash  from ITEMS i
        where
        i.backupgroup_id = %s and (i.DRIVE1_ID = %s or i.DRIVE2_ID = %s)
        group by i.hash) x
        """ % (drive_info["group_id"], drive_info["id"], drive_info["id"])
        # print(sql_getusedspace)

        try:
            cursor.execute(sql_getusedspace)
            result = cursor.fetchone()
            # print(result)
            if result["size"] is None:
                logical = int(drive_info["maxsize"])
            else:
                logical = int(drive_info["maxsize"]) - int(result["size"])
            return disk, logical

        except Exception as e:
            print("Exception")  # sql error
            print(e)
            tb = e.__traceback__
            traceback.print_tb(tb)
            return disk, 0

    def mark_item(self, bg_id, hash, external, status):
        tracking_field = 'DRIVE1_ID'
        if external:
            tracking_field = 'DRIVE2_ID'
        cursor = self.cursor
        sql_updateitem = 'update ITEMS i set %s = %s where backupgroup_id= %s and hash = "%s" ' % \
                             (tracking_field, status, bg_id, hash)

        try:
            cursor.execute(sql_updateitem)

        except Exception as e:
            print("Exception")  # sql error
            print(e)
            tb = e.__traceback__
            traceback.print_tb(tb)

    def is_hash_known(self, hash, backup_group):
        cursor = self.cursor
        sql_updateitem = 'select id from ITEMS where backupgroup_id = %s and hash = \'%s\'' % \
                         (backup_group, hash)

        try:
            cursor.execute(sql_updateitem)
            data = cursor.fetchall()
            if len(data) == 0:
                return 0
            else:
                return data[0]["id"]

        except Exception as e:
            print("Exception")  # sql error
            print(e)
            tb = e.__traceback__
            traceback.print_tb(tb)
            return 0

    def change_item_in_bui(self, bui_id, item_id, hash):
        cursor = self.cursor
        sql_updatebuitem = 'update BACKUPITEMS  set item_id  = %s, hash = \'%s\' where id = %s ' % \
                           (item_id, hash, bui_id)
        print(sql_updatebuitem)

        try:
            cursor.execute(sql_updatebuitem)

        except Exception as e:
            print("Exception")  # sql error
            print(e)
            tb = e.__traceback__
            traceback.print_tb(tb)

    def create_item(self, bg_id, hash, external, status, size):
        tracking_field = 'DRIVE1_ID'
        sql_insertitem = 'insert into ITEMS (backupgroup_id, hash, %s, filesize) values (%s, \'%s\', %s, %s)' % \
                         (tracking_field, bg_id, hash, status, size)
        if external:
            tracking_field = 'DRIVE2_ID'
            sql_insertitem = 'insert into ITEMS (backupgroup_id, hash, DRIVE1_ID, DRIVE2_ID, filesize) values (%s, \'%s\',  -12, %s, %s)' % \
                             (bg_id, hash, status, size)
        cursor = self.cursor

        try:
            cursor.execute(sql_insertitem)

        except Exception as e:
            print("Exception")  # sql error
            print(e)
            tb = e.__traceback__
            traceback.print_tb(tb)

    def close_finished_runs(self):
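        # Mark runs as ALL_SAVED once none of their items lack a drive
        # assignment, recording in ERRORS_SAVING how many items ended in an
        # error state (negative DRIVE1_ID/DRIVE2_ID).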
        sql_get_finished = """
        Select id, coalesce(x.count, 0) as count from RUNS r
        LEFT OUTER JOIN (
            Select run_id, count(*) as count
            from BACKUPITEMS b
            inner join ITEMS i
            on (b.item_id = i.id)
            where i.DRIVE1_ID < 0 or i.DRIVE2_ID < 0
            group by run_id
        ) x
        on r.id = x.run_id
        where
        (ALL_SAVED IS NULL or ALL_SAVED = 0)
        and
        id not in (
            Select distinct b.run_id as run_id
            from BACKUPITEMS b
            inner join ITEMS i
            on (b.item_id = i.id)
            where ((i.DRIVE1_ID is null or i.DRIVE1_ID = 0) or (i.DRIVE2_ID is null or i.DRIVE2_ID = 0)) )
        """
        sql_update_run = "UPDATE RUNS SET ALL_SAVED = 1, ERRORS_SAVING = %s where ID = %s"

        cursor = self.cursor

        try:
            cursor.execute(sql_get_finished)
            runs = cursor.fetchall()
            logger = self.log
            for run in runs:
                cursor.execute(sql_update_run, (run["count"], run["id"]))
                logger.info("Saved Run %s with %s Errors" %
                            (run["id"], run["count"]))
                logger.info({
                    'action': 'Saved Runs',
                    'run_id': run["id"],
                    'Errors': run["count"]
                })

        except Exception as e:
            print("Exception")  # sql error
            print(e)
            tb = e.__traceback__
            traceback.print_tb(tb)

    def cleanupBuffer(self):
        fh = FileHelper()
        dbh = DBHelper()
        logger = self.log

        sql_savedbuffer = "select * from ITEMS where (DRIVE1_ID > 0  and DRIVE2_ID > 0) and buffer_status = 1 order by id "
        sql_updatebufferstatus = "UPDATE ITEMS SET BUFFER_STATUS = 2 WHERE ID = %s"
        usage = fh.bufferusage()
        print(usage)

        try:
            db = dbh.getDictCursor()
            cursor = db["cursor"]
            cursor.execute(sql_savedbuffer)
            result = cursor.fetchall()

            for file in result:
                if usage <= 0.8:
                    break
                fh.removefrombuffer(file["HASH"], file["BACKUPGROUP_ID"])
                usage = fh.bufferusage()
                cursor.execute(sql_updatebufferstatus, (file["ID"],))
                print("removed %s from buffer for BG %s " %
                      (file["HASH"], file["BACKUPGROUP_ID"]))
                print(usage)
                logger.info({
                    'action': 'Removed from Buffer',
                    'hash': file["HASH"],
                    'backup_group': file["BACKUPGROUP_ID"],
                    "size": file["FILESIZE"]
                })

        except Exception as e:
            print("Exception")  # sql error
            print(e)
            tb = e.__traceback__
            traceback.print_tb(tb)

    def set_drive_full(self, id):

        cursor = self.cursor
        sql_updateitem = 'update DRIVES set drivefull = 1 where id=%s ' % id

        try:
            cursor.execute(sql_updateitem)

        except Exception as e:
            print("Exception")  # sql error
            print(e)
            tb = e.__traceback__
            traceback.print_tb(tb)