Пример #1
0
    def preprocess(self):
        """Run the preprocessing phase and build the data used for learning.

        Reads the raw input text, builds and persists the char->int and
        int->char lookup dictionaries as checkpoints, and generates the
        training patterns.

        @return: tuple (X, Y, char->int dict, int->char dict)
        """
        print('Starting Preprocessing Phase...')
        started_at = datetime.datetime.now()

        checkpoints = self.config['preprocessing']['checkpoints']
        raw_data = FileHelper.read_data_lower(
            self.config['preprocessing']['input_file'])

        # Model the characters as integers and persist the lookup table.
        char_to_int = self.generate_char_to_int_dictionary(raw_data)
        FileHelper.save_object_to_file(checkpoints['char2intDict_file'],
                                       char_to_int)

        # Inverse mapping, persisted as a checkpoint as well.
        int_to_char = self.generate_int_to_char_dictionary(raw_data)
        FileHelper.save_object_to_file(checkpoints['int2charDict_file'],
                                       int_to_char)

        # Generate the text patterns used for training.
        X, Y = self.generate_training_patterns(raw_data, char_to_int)

        elapsed = datetime.datetime.now() - started_at
        print('Preprocessing finished: %ds' % elapsed.total_seconds())

        return X, Y, char_to_int, int_to_char
Пример #2
0
 def __init__(self, config, environ, logdispatcher, statechglogger):
     '''
     Constructor. Sets up rule metadata, the managed kcpassword file,
     and the KVEditor entry that disables automatic login on Mac OS X.

     @param config: configuration object
     @param environ: environment object
     @param logdispatcher: logging object
     @param statechglogger: state change logger object
     '''
     RuleKVEditor.__init__(self, config, environ, logdispatcher,
                           statechglogger)
     self.rulenumber = 169
     self.rulename = 'DisableAutoLogin'
     self.formatDetailedResults("initialize")
     # Whitelist: rule only applies to the macOS versions listed here.
     self.applicable = {
         'type': 'white',
         'os': {
             'Mac OS X': ['10.15', 'r', '10.15.10']
         }
     }
     self.mandatory = True
     self.rootrequired = True
     # /etc/kcpassword stores the auto-login password; it is removed
     # ("remove": True) rather than rewritten.
     self.files = {
         "kcpassword": {
             "path": "/etc/kcpassword",
             "remove": True,
             "content": None,
             "permissions": None,
             "owner": None,
             "group": None,
             # event id: zero-padded rule number + file label
             "eventid": str(self.rulenumber).zfill(4) + "kcpassword"
         }
     }
     # KVEditor entry: autoLoginUser must be absent from loginwindow
     # preferences; the escaped message is what `defaults` prints when
     # the key does not exist.
     self.addKVEditor(
         "DisableAutoLogin", "defaults",
         "/Library/Preferences/com.apple.loginwindow", "", {
             "autoLoginUser": [
                 re.escape(
                     "The domain/default pair of (/Library/Preferences/com.apple.loginwindow, autoLoginUser) does not exist"
                 ), None
             ]
         }, "present", "",
         "This variable is to determine whether or not to " +
         "disable auto login", None, False, {})
     self.fh = FileHelper(self.logdispatch, self.statechglogger)
     self.ch = CommandHelper(self.logdispatch)
     # Register every managed file with the file helper.
     for filelabel, fileinfo in sorted(self.files.items()):
         self.fh.addFile(filelabel, fileinfo["path"], fileinfo["remove"],
                         fileinfo["content"], fileinfo["permissions"],
                         fileinfo["owner"], fileinfo["group"],
                         fileinfo["eventid"])
     self.sethelptext()
Пример #3
0
    def generate_char_to_int_dictionary(self, data):
        """Create the mapping of unique characters to unique integers.

        Builds a sorted vocabulary of the unique characters in *data*,
        persists it as a checkpoint, and returns a dict mapping each
        character to its index in the sorted vocabulary.

        @param data: input text (any iterable of characters)
        @return: dict mapping character -> integer index
        """
        # A set keeps only unique characters; sorting makes the
        # character -> index mapping deterministic across runs.
        vocab = sorted(set(data))
        FileHelper.save_object_to_file(
            self.config['preprocessing']['checkpoints']['vocabulary_file'],
            vocab)

        print(
            'Input data consists of %d Total Characters and a Vocabulary of %d Characters'
            % (len(data), len(vocab)))
        return {character: index for index, character in enumerate(vocab)}
Пример #4
0
def main():
    """Flush saved-buffer items (buffer_status 88) from the buffer cache.

    Fetches all ITEMS rows flagged with buffer_status 88, removes each
    file from the buffer, and marks the row as processed (status 89).
    """
    fh = FileHelper()
    dbh = DBHelper()

    sql_savedbuffer = "select * from ITEMS where buffer_status = 88 order by id"
    sql_updatebufferstatus = "UPDATE ITEMS SET BUFFER_STATUS = 89 WHERE ID = %s"
    usage = fh.bufferusage()
    print(usage)

    try:
        db = dbh.getDictCursor()
        cursor = db["cursor"]
        cursor.execute(sql_savedbuffer)
        result = cursor.fetchall()

        for file in result:
            # if usage <= 0.8:
            #    break
            fh.removefrombuffer(file["HASH"], file["BACKUPGROUP_ID"])
            usage = fh.bufferusage()
            # BUG FIX: (x) is just x, not a tuple; DB-API query parameters
            # must be a sequence, so pass a one-element tuple (x,).
            cursor.execute(sql_updatebufferstatus, (file["ID"],))
            print("removed %s from buffer for BG %s " %
                  (file["HASH"], file["BACKUPGROUP_ID"]))
            print(usage)

    except Exception as e:
        print("Exception")  # sql error
        print(e)
        tb = e.__traceback__
        traceback.print_tb(tb)
Пример #5
0
    def cleanupBuffer(self):
        """Evict fully-backed-up items from the buffer until usage <= 80%.

        Selects ITEMS that exist on both drives (DRIVE1_ID and DRIVE2_ID set)
        and are still buffered (buffer_status 1), removes them from the
        buffer, and marks them as evicted (buffer_status 2). Stops as soon
        as buffer usage drops to 0.8 or below.
        """
        fh = FileHelper()
        dbh = DBHelper()
        logger = self.log

        sql_savedbuffer = "select * from ITEMS where (DRIVE1_ID > 0  and DRIVE2_ID > 0) and buffer_status = 1 order by id "
        sql_updatebufferstatus = "UPDATE ITEMS SET BUFFER_STATUS = 2 WHERE ID = %s"
        usage = fh.bufferusage()
        print(usage)

        try:
            db = dbh.getDictCursor()
            cursor = db["cursor"]
            cursor.execute(sql_savedbuffer)
            result = cursor.fetchall()

            for file in result:
                # Stop once the buffer is back under the 80% threshold.
                if usage <= 0.8:
                    break
                fh.removefrombuffer(file["HASH"], file["BACKUPGROUP_ID"])
                usage = fh.bufferusage()
                # BUG FIX: (x) is just x, not a tuple; DB-API query
                # parameters must be a sequence, so pass (x,).
                cursor.execute(sql_updatebufferstatus, (file["ID"],))
                print("removed %s from buffer for BG %s " %
                      (file["HASH"], file["BACKUPGROUP_ID"]))
                print(usage)
                logger.info({
                    'action': 'Removed from Buffer',
                    'hash': file["HASH"],
                    'bachup_group': file["BACKUPGROUP_ID"],
                    "size": file["FILESIZE"]
                })

        except Exception as e:
            print("Exception")  # sql error
            print(e)
            tb = e.__traceback__
            traceback.print_tb(tb)
Пример #6
0
    def send_request(self) -> None:
        """Send the request that corresponds to the current state.

        Dispatches on ``self.state``: login goes over the authentication
        socket, every other request goes over the metadata socket as a
        JSON-encoded message built by ``self.messages``.
        """
        if self.state == self.State.LOGIN:
            self.authentication_thread.socket.send(
                json.dumps(
                    self.messages.message_login(self.settings.username,
                                                self.settings.password)))

        elif self.state == self.State.SET_NODE_METADATA:
            self.metadata_thread.socket.send(
                json.dumps(
                    self.messages.message_set_node_metadata(
                        self.node_id,
                        self.network_id,
                        "some name",
                        "some description",
                        61.454759,
                        23.885602,
                        0.0,
                        True,
                        False,
                    )))

        elif self.state == self.State.CREATE_BUILDING:
            self.metadata_thread.socket.send(
                json.dumps(
                    self.messages.message_create_building("New building")))

        elif self.state == self.State.CREATE_FLOOR_PLAN:
            self.metadata_thread.socket.send(
                json.dumps(
                    self.messages.message_create_floor_plan(
                        self.messages.new_building_id, "New floor plan")))

        # BUG FIX: the next two branches compared against
        # self.state.SET_FLOOR_PLAN_IMAGE[_THUMBNAIL] (attribute lookup on
        # the state *instance*) instead of self.State... (the enum class),
        # inconsistent with every other branch.
        elif self.state == self.State.SET_FLOOR_PLAN_IMAGE:
            self.metadata_thread.socket.send(
                json.dumps(
                    self.messages.message_set_image(
                        FileHelper.read_file_content_as_base64(
                            self.floor_plan_image_file_path))))

        elif self.state == self.State.SET_FLOOR_PLAN_IMAGE_THUMBNAIL:
            self.metadata_thread.socket.send(
                json.dumps(
                    self.messages.message_set_image(
                        FileHelper.read_file_content_as_base64(
                            self.floor_plan_image_thumbnail_file_path))))

        elif self.state == self.State.UPDATE_FLOOR_PLAN:
            self.metadata_thread.socket.send(
                json.dumps(
                    self.messages.message_update_floor_plan(
                        self.messages.new_floor_plan_id,
                        image_id=self.floor_plan_image_id,
                        image_thumbnail_id=self.floor_plan_image_thumbnail_id,
                        latitude_lefttop=61.454823,
                        longitude_lefttop=23.884526,
                        altitude_lefttop=0,
                        x_normcoord_lefttop=0.0748329808357999,
                        y_normcoord_lefttop=0.203506328386351,
                        latitude_righttop=61.454773,
                        longitude_righttop=23.886096,
                        altitude_righttop=0,
                        x_normcoord_righttop=0.903860782456575,
                        y_normcoord_righttop=0.203571943827163,
                        latitude_leftbottom=61.454612,
                        longitude_leftbottom=23.884503,
                        altitude_leftbottom=0,
                        x_normcoord_leftbottom=0.0747559429065484,
                        y_normcoord_leftbottom=0.780014805319742,
                        latitude_rightbottom=61.454562,
                        longitude_rightbottom=23.88607,
                        altitude_rightbottom=0,
                        x_normcoord_rightbottom=0.904069882566427,
                        y_normcoord_rightbottom=0.78039444527477,
                        x_distance_point1=0.450065006833406,
                        y_distance_point1=0.203192686229106,
                        x_distance_point2=0.449649314572983,
                        y_distance_point2=0.780260953915855,
                        distance_in_m=25.1,
                        level=0,
                        image_width=self.floor_plan_image_width,
                        image_height=self.floor_plan_image_height,
                    )))

        elif self.state == self.State.ADD_NODE_TO_FLOOR_PLAN:
            self.metadata_thread.socket.send(
                json.dumps(
                    self.messages.message_add_node_to_floor_plan(
                        self.node_id, self.network_id,
                        self.messages.new_floor_plan_id)))

        elif self.state == self.State.REMOVE_NODE_FROM_FLOOR_PLAN:
            self.metadata_thread.socket.send(
                json.dumps(
                    self.messages.message_remove_node_from_floor_plan(
                        self.node_id, self.network_id)))

        elif self.state == self.State.DELETE_NODE:
            # Sleep for a while before deletion that node addition has reached
            # all backend components
            time.sleep(5)

            self.metadata_thread.socket.send(
                json.dumps(
                    self.messages.message_delete_node(self.node_id,
                                                      self.network_id, False)))

        elif self.state == self.State.DELETE_FLOOR_PLAN:
            self.metadata_thread.socket.send(
                json.dumps(
                    self.messages.message_delete_floor_plan(
                        self.messages.new_floor_plan_id)))

        elif self.state == self.State.DELETE_BUILDING:
            self.metadata_thread.socket.send(
                json.dumps(
                    self.messages.message_delete_building(
                        self.messages.new_building_id)))
Пример #7
0
def main():
    """Exercise FileHelper: parent/filename/hash helpers, then move, copy,
    delete and create-parent operations, printing OK/Error for each check.
    """
    fh = FileHelper()
    src = 'D:\\backup\\test\\dir1234\\test.txt'

    print('------')
    print("get parent test")
    parent = fh.get_parent(src)
    print(parent)
    print()

    print('------')
    print("get filename test")
    basename = fh.get_filename(src)
    print(basename)
    print()

    print('------')
    print("hash test")
    fhash = fh.hash_file(src)
    print(fhash)
    print()

    print('------')
    print("path from hash test")
    tgt = fh.path_from_hash('D:\\backup\\test', 'TST0001', fhash)
    print(tgt)
    print()

    print('------')
    print("move test")
    fh.move_file(src, tgt)
    if os.path.isfile(tgt):
        print("OK: Target Exists")
    else:
        print("Error: Target does not Exists")
    if os.path.isfile(src):
        print("Error: Source Exists")
    else:
        print("OK: Source does not Exists")
    print()

    print('------')
    print("copy test")
    fh.copy_file(tgt, src)
    if os.path.isfile(tgt):
        print("OK: Target Exists")
    else:
        print("Error: Target does not Exists")
    if os.path.isfile(src):
        print("OK: Source Exists")
    else:
        print("Error: Source does not Exists")
    print()

    print('------')
    print("delete test")
    fh.delete_file(tgt)
    if os.path.isfile(tgt):
        print("Error: Target Exists")
    else:
        print("OK: Target does not Exists")
    if os.path.isfile(src):
        print("OK: Source Exists")
    else:
        print("Error: Source does not Exists")
    print()

    print('------')
    print("create parent test")
    # Two nested random directory names ensure create_parent_if_not_exist
    # has to create more than one level.
    basedir = 'D:\\backup\\test\\'
    dir1 = basedir + ''.join(
        random.choices(string.ascii_uppercase + string.digits, k=5))
    dir2 = dir1 + '\\' + ''.join(
        random.choices(string.ascii_uppercase + string.digits, k=5))
    filename = dir2 + "\\" + "test.txt"
    print(dir1)
    print(dir2)
    print(filename)
    fh.create_parent_if_not_exist(filename)
    if os.path.isdir(dir1):
        print("OK: Dir1 Exists")
    else:
        print("Error: Dir1 does not Exists")
    # BUG FIX: this check previously tested os.path.isfile(src), which says
    # nothing about dir2 — test the directory that was supposed to be made.
    if os.path.isdir(dir2):
        print("OK: Dir2 Exists")
    else:
        print("Error: Dir2 does not Exists")
Пример #8
0
def main():
    """Archive backup runs older than 60 days to .bmu files.

    Selects runs that still have BACKUPITEMS and started more than
    60 days ago, dumps each run plus its backup items to
    <target_folder>/<run_id>.bmu, then deletes the archived BACKUPITEMS.
    """
    fh = FileHelper()
    dbh = DBHelper()
    sql_runs = "Select * from RUNS " \
               "where id in (select distinct run_id from BACKUPITEMS) " \
               "and TIME_STARTED < DATE_SUB(NOW(), INTERVAL 60 DAY)"

    runs = []
    target_folder = 'b:/current/archive/bmu/'
    try:
        db = dbh.getDictCursor()
        cursor = db["cursor"]
        cursor.execute(sql_runs)
        result = cursor.fetchall()
        for r in result:
            print(r)
            runs.append(r["ID"])

    except Exception as e:
        print("Exception")  # sql error
        print(e)
        tb = e.__traceback__
        traceback.print_tb(tb)

    # If the query above failed, runs stays empty and this loop is a no-op,
    # so cursor is never used while undefined.
    for run_id in runs:
        print("Run: %s" % run_id)

        target_file = "%s%s.bmu" % (target_folder, run_id)

        sql_run = "select * from RUNS where id = %s"
        sql_bui = "SELECT B.*, F.path FROM BACKUPITEMS B inner join FILES F ON B.file_id = F.id where run_id = %s"

        sql_DELETE = "DELETE FROM BACKUPITEMS WHERE run_id = %s"

        rundata = {}

        try:
            # FIX: pass query parameters as explicit one-element tuples;
            # a bare scalar is only accepted by some drivers.
            cursor.execute(sql_run, (run_id,))
            result = cursor.fetchone()
            rundata["run"] = result

            cursor.execute(sql_bui, (run_id,))
            result = cursor.fetchall()

            buis = []
            for bui in result:
                buis.append(bui)

            # print(buis)
            rundata["backupitems"] = buis

            # Persist the archive before deleting the source rows.
            fh.save_dict_to_file(rundata, target_file)

            cursor.execute(sql_DELETE, (run_id,))

        except Exception as e:
            print("Exception")  # sql error
            print(e)
            tb = e.__traceback__
            traceback.print_tb(tb)
Пример #9
0
    def __init__(self, config, environ, logdispatcher, statechglogger):
        """
        Initialize rule metadata, build the platform-specific table of
        managed Kerberos files, and create helper objects plus the
        CONFIGUREFILES configuration item.

        @param config: configuration object
        @param environ: environment object
        @param logdispatcher: logging object
        @param statechglogger: state change logger object
        """

        Rule.__init__(self, config, environ, logdispatcher,
                              statechglogger)
        self.rulenumber = 255
        self.rulename = 'ConfigureKerberos'
        self.formatDetailedResults("initialize")
        self.mandatory = True
        self.sethelptext()
        self.rootrequired = True
        self.guidance = []
        self.applicable = {'type': 'white', 'family': 'linux',
                           'os': {'Mac OS X': ['10.15', 'r', '10.15.10']}}
        # This if/else statement fixes a bug in Configure Kerberos that
        # occurs on Debian systems due to the fact that Debian has no wheel
        # group by default.
        if self.environ.getosfamily() == 'darwin':
            # macOS: install /etc/krb5.conf from MACKRB5 and remove the
            # legacy MIT Kerberos preference files if present.
            self.files = {"krb5.conf":
                          {"path": "/etc/krb5.conf",
                           "remove": False,
                           "content": MACKRB5,
                           "permissions": 0o644,
                           "owner": os.getuid(),
                           "group": "wheel",
                           "eventid": str(self.rulenumber).zfill(4) + "krb5"},
                          "edu.mit.Kerberos":
                          {"path": "/Library/Preferences/edu.mit.Kerberos",
                           "remove": True,
                           "content": None,
                           "permissions": None,
                           "owner": None,
                           "group": None,
                           "eventid": str(self.rulenumber).zfill(4) +
                           "Kerberos"},
                          "edu.mit.Kerberos.krb5kdc.launchd":
                          {"path": "/Library/Preferences/edu.mit.Kerberos.krb5kdc.launchd",
                           "remove": True,
                           "content": None,
                           "permissions": None,
                           "owner": None,
                           "group": None,
                           "eventid": str(self.rulenumber).zfill(4) +
                           "krb5kdc"},
                          "kerb5.conf":
                          {"path": "/etc/kerb5.conf",
                           "remove": True,
                           "content": None,
                           "permissions": None,
                           "owner": None,
                           "group": None,
                           "eventid": str(self.rulenumber).zfill(4) + "kerb5"},
                          "edu.mit.Kerberos.kadmind.launchd":
                          {"path": "/Library/Preferences/edu.mit.Kerberos.kadmind.launchd",
                           "remove": True,
                           "content": None,
                           "permissions": None,
                           "owner": None,
                           "group": None,
                           "eventid": str(self.rulenumber).zfill(4) +
                           "kadmind"},
                          }
        else:
            # Linux: only /etc/krb5.conf is managed, from LINUXKRB5.
            self.files = {"krb5.conf":
                          {"path": "/etc/krb5.conf",
                           "remove": False,
                           "content": LINUXKRB5,
                           "permissions": 0o644,
                           "owner": "root",
                           "group": "root",
                           "eventid": str(self.rulenumber).zfill(4) + "krb5"}}
        self.ch = CommandHelper(self.logdispatch)
        self.fh = FileHelper(self.logdispatch, self.statechglogger)
        # Package helper is only needed (and available) on Linux.
        if self.environ.getosfamily() == 'linux':
                self.ph = Pkghelper(self.logdispatch, self.environ)
        self.filepathToConfigure = []
        # Register every managed file with the file helper and record a
        # human-readable description of the planned action for the CI text.
        for filelabel, fileinfo in sorted(self.files.items()):
            if fileinfo["remove"]:
                msg = "Remove if present " + str(fileinfo["path"])
            else:
                msg = "Add or update if needed " + str(fileinfo["path"])
            self.filepathToConfigure.append(msg)
            self.fh.addFile(filelabel,
                            fileinfo["path"],
                            fileinfo["remove"],
                            fileinfo["content"],
                            fileinfo["permissions"],
                            fileinfo["owner"],
                            fileinfo["group"],
                            fileinfo["eventid"]
                            )
        # Configuration item instantiation
        datatype = "bool"
        key = "CONFIGUREFILES"
        instructions = "When Enabled will fix these files: " + \
            str(self.filepathToConfigure)
        default = True
        self.ci = self.initCi(datatype, key, instructions, default)
Пример #10
0
class ConfigureKerberos(Rule):
    '''Configure the Kerberos client on Mac OS X and Linux.

    Manages /etc/krb5.conf (and, on macOS, several legacy MIT Kerberos
    preference files) and, on Linux, the Kerberos client packages.

    @author: Ekkehard J. Koch'''

    def __init__(self, config, environ, logdispatcher, statechglogger):
        """
        Initialize rule metadata, build the platform-specific table of
        managed Kerberos files, and create helper objects plus the
        CONFIGUREFILES configuration item.

        @param config: configuration object
        @param environ: environment object
        @param logdispatcher: logging object
        @param statechglogger: state change logger object
        """

        Rule.__init__(self, config, environ, logdispatcher,
                              statechglogger)
        self.rulenumber = 255
        self.rulename = 'ConfigureKerberos'
        self.formatDetailedResults("initialize")
        self.mandatory = True
        self.sethelptext()
        self.rootrequired = True
        self.guidance = []
        self.applicable = {'type': 'white', 'family': 'linux',
                           'os': {'Mac OS X': ['10.15', 'r', '10.15.10']}}
        # This if/else statement fixes a bug in Configure Kerberos that
        # occurs on Debian systems due to the fact that Debian has no wheel
        # group by default.
        if self.environ.getosfamily() == 'darwin':
            # macOS: install /etc/krb5.conf from MACKRB5 and remove the
            # legacy MIT Kerberos preference files if present.
            self.files = {"krb5.conf":
                          {"path": "/etc/krb5.conf",
                           "remove": False,
                           "content": MACKRB5,
                           "permissions": 0o644,
                           "owner": os.getuid(),
                           "group": "wheel",
                           "eventid": str(self.rulenumber).zfill(4) + "krb5"},
                          "edu.mit.Kerberos":
                          {"path": "/Library/Preferences/edu.mit.Kerberos",
                           "remove": True,
                           "content": None,
                           "permissions": None,
                           "owner": None,
                           "group": None,
                           "eventid": str(self.rulenumber).zfill(4) +
                           "Kerberos"},
                          "edu.mit.Kerberos.krb5kdc.launchd":
                          {"path": "/Library/Preferences/edu.mit.Kerberos.krb5kdc.launchd",
                           "remove": True,
                           "content": None,
                           "permissions": None,
                           "owner": None,
                           "group": None,
                           "eventid": str(self.rulenumber).zfill(4) +
                           "krb5kdc"},
                          "kerb5.conf":
                          {"path": "/etc/kerb5.conf",
                           "remove": True,
                           "content": None,
                           "permissions": None,
                           "owner": None,
                           "group": None,
                           "eventid": str(self.rulenumber).zfill(4) + "kerb5"},
                          "edu.mit.Kerberos.kadmind.launchd":
                          {"path": "/Library/Preferences/edu.mit.Kerberos.kadmind.launchd",
                           "remove": True,
                           "content": None,
                           "permissions": None,
                           "owner": None,
                           "group": None,
                           "eventid": str(self.rulenumber).zfill(4) +
                           "kadmind"},
                          }
        else:
            # Linux: only /etc/krb5.conf is managed, from LINUXKRB5.
            self.files = {"krb5.conf":
                          {"path": "/etc/krb5.conf",
                           "remove": False,
                           "content": LINUXKRB5,
                           "permissions": 0o644,
                           "owner": "root",
                           "group": "root",
                           "eventid": str(self.rulenumber).zfill(4) + "krb5"}}
        self.ch = CommandHelper(self.logdispatch)
        self.fh = FileHelper(self.logdispatch, self.statechglogger)
        # Package helper is only needed (and available) on Linux.
        if self.environ.getosfamily() == 'linux':
                self.ph = Pkghelper(self.logdispatch, self.environ)
        self.filepathToConfigure = []
        # Register every managed file with the file helper and record a
        # human-readable description of the planned action for the CI text.
        for filelabel, fileinfo in sorted(self.files.items()):
            if fileinfo["remove"]:
                msg = "Remove if present " + str(fileinfo["path"])
            else:
                msg = "Add or update if needed " + str(fileinfo["path"])
            self.filepathToConfigure.append(msg)
            self.fh.addFile(filelabel,
                            fileinfo["path"],
                            fileinfo["remove"],
                            fileinfo["content"],
                            fileinfo["permissions"],
                            fileinfo["owner"],
                            fileinfo["group"],
                            fileinfo["eventid"]
                            )
        # Configuration item instantiation
        datatype = "bool"
        key = "CONFIGUREFILES"
        instructions = "When Enabled will fix these files: " + \
            str(self.filepathToConfigure)
        default = True
        self.ci = self.initCi(datatype, key, instructions, default)

    def report(self):
        '''Run report actions for configure kerberos.

        Determines the compliance status of the current system: on Linux,
        required Kerberos packages must be installed (the package list
        depends on the detected package manager); on all platforms the
        managed files must match the expected state.

        :returns: self.compliant

        :rtype: bool
        @author: ???
        @change: Breen Malmberg - 2/23/2017 - added doc string; added const
                 checks preamble to report and fix methods
        '''

        self.compliant = True
        self.detailedresults = ""

        # UPDATE THIS SECTION IF YOU CHANGE THE CONSTANTS BEING USED IN THE RULE
        constlist = [MACKRB5, LINUXKRB5]
        if not self.checkConsts(constlist):
            self.compliant = False
            self.detailedresults = "\nPlease ensure that the constants: MACKRB5, LINUXKRB5, in localize.py, are defined and are not None. This rule will not function without them."
            self.formatDetailedResults("report", self.compliant, self.detailedresults)
            return self.compliant

        try:

            if self.environ.getosfamily() == 'linux':
                # Pick the package set matching the detected package manager.
                packagesRpm = ["pam_krb5", "krb5-libs", "krb5-workstation",
                               "sssd-krb5", "sssd-krb5-common"]
                packagesDeb = ["krb5-config", "krb5-user", "libpam-krb5"]
                packagesSuse = ["pam_krb5", "sssd-krb5", "sssd-krb5-common",
                                "krb5-client", "krb5"]
                if self.ph.determineMgr() == "apt-get":
                    self.packages = packagesDeb
                elif self.ph.determineMgr() == "zypper":
                    self.packages = packagesSuse
                else:
                    self.packages = packagesRpm
                # Only flag packages that are actually available to install.
                for package in self.packages:
                    if not self.ph.check(package) and self.ph.checkAvailable(package):
                        self.compliant = False
                        self.detailedresults += package + " is not installed\n"
            if not self.fh.evaluateFiles():
                self.compliant = False
                self.detailedresults += self.fh.getFileMessage()

        except (KeyboardInterrupt, SystemExit):
            raise
        except Exception:
            self.compliant = False
            self.detailedresults += str(traceback.format_exc())
            self.logdispatch.log(LogPriority.ERROR, self.detailedresults)
        self.formatDetailedResults("report", self.compliant, self.detailedresults)
        self.logdispatch.log(LogPriority.INFO, self.detailedresults)
        return self.compliant

    def fix(self):
        '''Run fix actions.

        Installs any missing Kerberos packages (Linux, recording an undo
        event for each install) and fixes the managed files. Does nothing
        unless the CONFIGUREFILES configuration item is enabled.

        :returns: self.rulesuccess

        :rtype: bool
        @author: ???
        @change: Breen Malmberg - 2/23/2017 - added doc string; added
                 checkconsts preamble to ensure the rule does not attempt to
                 run without required information (from localize.py)
        '''

        self.rulesuccess = True
        self.detailedresults = ""
        self.iditerator = 0

        # UPDATE THIS SECTION IF YOU CHANGE THE CONSTANTS BEING USED IN THE RULE
        constlist = [MACKRB5, LINUXKRB5]
        if not self.checkConsts(constlist):
            fixsuccess = False
            self.formatDetailedResults("fix", fixsuccess, self.detailedresults)
            return fixsuccess

        try:

            # Clear any previously recorded change events for this rule.
            eventlist = self.statechglogger.findrulechanges(self.rulenumber)
            for event in eventlist:
                self.statechglogger.deleteentry(event)

            if self.ci.getcurrvalue():
                pkgsToInstall = []
                if self.environ.getosfamily() == 'linux':
                    for package in self.packages:
                        if not self.ph.check(package):
                            if self.ph.checkAvailable(package):
                                pkgsToInstall.append(package)
                    for package in pkgsToInstall:
                        if self.ph.install(package):
                            # Record an undo event for each installed package.
                            self.iditerator += 1
                            myid = iterate(self.iditerator,
                                           self.rulenumber)
                            event = {"eventtype": "pkghelper",
                                     "pkgname": package,
                                     "startstate": "removed",
                                     "endstate": "installed"}
                            self.statechglogger.recordchgevent(myid, event)
                        else:
                            self.rulesuccess = False
                            self.detailedresults += "Installation of " + package + " did not succeed.\n"
                if not self.fh.fixFiles():
                    self.rulesuccess = False
                    self.detailedresults += self.fh.getFileMessage()
            else:
                self.rulesuccess = False
                self.detailedresults = str(self.ci.getkey()) + " was disabled. No action was taken!"

        except (KeyboardInterrupt, SystemExit):
            raise
        except Exception:
            self.rulesuccess = False
            self.detailedresults += str(traceback.format_exc())
            self.logdispatch.log(LogPriority.ERROR, self.detailedresults)
        self.formatDetailedResults("fix", self.rulesuccess, self.detailedresults)
        self.logdispatch.log(LogPriority.INFO, self.detailedresults)
        return self.rulesuccess
Пример #11
0
class ScanFiles:
    """Scan the base directories of a backup group, register every file as a
    backup item in the database, and copy new or changed content into the
    buffer area.

    NOTE(review): the helpers below are *class* attributes, so the logger and
    the database connection are created once at import time and shared by all
    instances. Kept as-is for backward compatibility; consider moving them
    into __init__ so each scanner owns its own connection.
    """

    backup_group = 1   # id of the backup group being scanned
    run_id = -1        # id of the current RUNS row; -1 until create_run()
    log_helper = LogHelper()
    log = log_helper.getLogger()
    db_helper = DBHelper()
    db_data = db_helper.getDictCursor()
    cursor = db_data["cursor"]
    file_helper = FileHelper()
    file_filter = []   # compiled regexes matched against file names
    dir_filter = []    # compiled regexes matched against parent directories

    def __init__(self, backup_group_id):
        """Create a scanner for one backup group, register a new run row and
        load the exclusion filters."""
        self.backup_group = backup_group_id
        self.create_run()
        self.load_filters()

    def load_filters(self):
        """Load file and directory exclusion filters from the FILTERS table
        (group-specific rows plus global NULL-group rows) and compile them."""
        cursor = self.cursor
        sql_loadfilefilter = 'Select expression from FILTERS ' \
                             'where (BACKUPGROUP_ID = %s OR BACKUPGROUP_ID is null) ' \
                             'and file = 1'
        sql_loaddirfilter = 'Select expression from FILTERS ' \
                            'where (BACKUPGROUP_ID = %s OR BACKUPGROUP_ID is null) ' \
                            'and dir = 1'
        try:
            # DB-API query parameters must be a sequence: (x,) not (x)
            cursor.execute(sql_loaddirfilter, (self.backup_group,))
            result = cursor.fetchall()
            self.dir_filter = self.compile_filters(result)

            cursor.execute(sql_loadfilefilter, (self.backup_group,))
            result = cursor.fetchall()
            self.file_filter = self.compile_filters(result)

        except Exception as e:
            print("Exception")  # sql error
            print(e)
            tb = e.__traceback__
            traceback.print_tb(tb)

    def compile_filters(self, result_set):
        """Translate the '*' wildcard expressions stored in the database into
        compiled regular expressions and return them as a list."""
        compiled = []
        for data in result_set:
            raw_filter = '^(?=.*' + data["expression"].replace('*',
                                                               '(.*)') + ').*'
            print(raw_filter)
            # renamed local: 'filter' shadowed the builtin
            pattern = re.compile(raw_filter)
            compiled.append(pattern)
        return compiled

    def check_filter(self, filters, path):
        """Return True if *path* matches any of the compiled *filters*."""
        for pattern in filters:
            match = pattern.match(path)
            if match:
                return True
        return False

    def create_run(self):
        """Insert a new RUNS row for this backup group and remember its id."""
        cursor = self.cursor

        sql = "INSERT INTO RUNS (BACKUPGROUP_ID, TIME_STARTED) VALUES (%s, CURRENT_TIMESTAMP)"
        try:
            cursor.execute(sql, (self.backup_group,))
            self.run_id = cursor.lastrowid

            self.log.info({
                'action': 'Create Run_ID',
                'run_id': self.run_id,
                'backup_group': self.backup_group
            })
        except Exception as e:
            print("Exception")  # sql error
            print(e)
            tb = e.__traceback__
            traceback.print_tb(tb)

    def scan_for_files(self):
        """Walk every base directory of the backup group, apply the file and
        directory filters, record each surviving file as a BACKUPITEMS row,
        and buffer its content when not already buffered."""
        cursor = self.cursor

        sql_insert_file = 'INSERT IGNORE INTO FILES (backupgroup_id, path, path_hash) ' \
                          'VALUES (%s, %s, md5(concat(%s, "-", %s)))'
        sql_insert_bu = """
        INSERT INTO BACKUPITEMS (RUN_ID, FILE_ID, FILESIZE, LASTMODIFIED, BACKUPGROUP_ID)
        Select %s, id, %s, %s, %s
        from FILES where path_hash = md5(concat(%s, '-', %s))
        """

        dirs = self.get_basedirs(cursor)

        # ---------------- Scan Dirs
        totalfiles = 0
        for dir in dirs:
            filesperdir = 0
            filterdfiles = 0
            started = int(round(time.time() * 1000))
            self.log.info({
                'action': 'Start scanning Dir',
                'run_id': self.run_id,
                'backup_group': self.backup_group,
                'dir': dir['PATH']
            })
            # renamed 'dirs' -> 'subdirs': the walk target shadowed the list
            # of base directories iterated by the outer loop
            for root, subdirs, files in os.walk(dir['PATH']):
                for file in files:
                    filesperdir += 1
                    file_hash = ""

                    # recycle the DB connection periodically to avoid
                    # long-lived cursor/connection problems
                    if filesperdir % 1000 == 0:
                        cursor = self.new_connection()

                    try:
                        filedata = {}
                        filedata['filepath'] = os.path.join(root, file)
                        filedata['mtime'] = int(
                            round(
                                os.path.getmtime(filedata['filepath']) * 1000))
                        filedata['size'] = os.stat(
                            filedata['filepath']).st_size

                        # file filter
                        filename = self.file_helper.get_filename(
                            filedata['filepath'])
                        if self.check_filter(self.file_filter, filename):
                            print("Filtered (file) out " +
                                  filedata['filepath'] + ' (' + filename + ')')
                            filterdfiles += 1
                            continue

                        # dir filter
                        parent = self.file_helper.get_parent(
                            filedata['filepath'])
                        if self.check_filter(self.dir_filter, parent):
                            print("Filtered (dir) out " +
                                  filedata['filepath'] + ' (' + parent + ')')
                            filterdfiles += 1
                            continue

                        totalfiles += 1
                        with warnings.catch_warnings():
                            warnings.simplefilter("ignore")
                            cursor.execute(
                                sql_insert_file,
                                (self.backup_group, filedata['filepath'],
                                 self.backup_group, filedata['filepath']))
                        cursor.execute(
                            sql_insert_bu,
                            (self.run_id, filedata['size'], filedata['mtime'],
                             self.backup_group, self.backup_group,
                             filedata['filepath']))

                        new_id = cursor.lastrowid

                        affected_rows, file_hash = self.map_unchganged(
                            cursor, filedata, new_id)

                        if affected_rows > 0:
                            self.log.debug({
                                'action': 'Unchanged File',
                                'path': filedata['filepath'],
                                'run_id': self.run_id,
                                'backup_group': self.backup_group,
                                'count': affected_rows
                            })
                        else:
                            file_hash = self.hash_match_or_create_item(
                                cursor, filedata, new_id)

                        if file_hash is not None:

                            buffer_status = self.check_buffer_status(
                                cursor, file_hash)

                            if buffer_status <= 0:

                                self.buffer_file(cursor, filedata, file_hash,
                                                 new_id)
                            else:
                                self.log.debug({
                                    'action': 'File already Buffered',
                                    'path': filedata['filepath'],
                                    'run_id': self.run_id,
                                    'backup_group': self.backup_group,
                                    'hash': file_hash,
                                    'backup item': new_id
                                })

                    except Exception as e:
                        cursor = self.new_connection()
                        print("Exception")  # sql error
                        print(e)
                        tb = e.__traceback__
                        traceback.print_tb(tb)

                    # BUGFIX: guard totalfiles > 0 -- previously 0 % 10000 == 0
                    # was true, so this printed on every early exception and
                    # could hit a never-assigned 'filedata'
                    if totalfiles and totalfiles % 10000 == 0:
                        print("%s Files Scanned. Last Scanned: %s" %
                              (totalfiles, filedata))

            finished = int(round(time.time() * 1000))
            duration = finished - started
            divider = 1
            if filesperdir > 0:
                divider = filesperdir
            per_file = duration / divider
            self.log.info({
                'action': 'End scanning Dir',
                'run_id': self.run_id,
                'backup_group': self.backup_group,
                'dir': dir['PATH'],
                'count': filesperdir,
                'duration': duration,
                'per_file': per_file,
                'filtered': filterdfiles
            })
            cursor = self.new_connection()

        self.log.info({
            'action': 'End scanning Dirs',
            'run_id': self.run_id,
            'backup_group': self.backup_group,
            'count': totalfiles
        })

        # ------------------ SET Hashing Complete
        cursor = self.new_connection()
        sql_sethashingsuccess = 'UPDATE RUNS SET SUCESSFUL = 1 WHERE ID = %s'

        try:
            cursor.execute(sql_sethashingsuccess, (self.run_id,))
            self.log.info({
                'action': 'Scanning and Hashing successful',
                'run_id': self.run_id,
                'backup_group': self.backup_group
            })

        except Exception as e:
            print("Exception")  # sql error
            print(e)
            tb = e.__traceback__
            traceback.print_tb(tb)

    def buffer_file(self, cursor, filedata, new_hash, new_id):
        """Copy the file described by *filedata* into the buffer area under
        *new_hash*, re-hash the copy to validate it, and handle files that
        changed while they were being copied."""
        sql_update_buffer_status = "Update ITEMS Set BUFFER_STATUS=%s where hash = %s and backupgroup_id = %s"
        sql_check_hash_exists = "select count(*) as count, max(id) as item_id from ITEMS where hash = %s and backupgroup_id = %s"
        sql_updatebuitem = 'update BACKUPITEMS  set item_id  = %s, hash = %s where id = %s '
        # Build Target Path
        bufferpath = self.file_helper.buffer_path_from_hash(
            new_hash, self.backup_group)
        self.file_helper.create_parent_if_not_exist(bufferpath)
        # Copy File
        self.file_helper.copy_file(filedata['filepath'], bufferpath)
        # Validate Hash
        tgt_hash = self.file_helper.hash_file(bufferpath)
        if tgt_hash == new_hash:
            # Set Bufferstatus to 1
            cursor.execute(sql_update_buffer_status,
                           (1, new_hash, self.backup_group))
            self.log.info({
                'action': 'File Buffered Successfully',
                'path': filedata['filepath'],
                'run_id': self.run_id,
                'backup_group': self.backup_group,
                'hash': new_hash,
                'backup item': new_id
            })

        else:
            # hash original again
            src_hash = self.file_helper.hash_file(filedata['filepath'])

            if src_hash != tgt_hash:
                # delete target and  set buffer code to -1
                self.file_helper.delete_file(bufferpath)
                cursor.execute(sql_update_buffer_status,
                               (-1, new_hash, self.backup_group))
                self.log.info({
                    'action': 'Could not Buffer: Fast Changing',
                    'path': filedata['filepath'],
                    'run_id': self.run_id,
                    'backup_group': self.backup_group,
                    'hash': new_hash,
                    'backup item': new_id
                })
            else:
                # Check if entry for new Hash exists
                cursor.execute(sql_check_hash_exists,
                               (tgt_hash, self.backup_group))
                rs2 = cursor.fetchone()
                if rs2["count"] == 0:
                    # set orig Item Entry to -2
                    cursor.execute(sql_update_buffer_status,
                                   (-2, new_hash, self.backup_group))
                    # create items entry
                    sql_insertitems = "Insert into ITEMS(backupgroup_id, hash, filesize) VALUES (%s, %s, %s)"
                    cursor.execute(sql_insertitems,
                                   (self.backup_group, tgt_hash,
                                    os.stat(bufferpath).st_size))
                    # move file
                    tgtpath2 = self.file_helper.buffer_path_from_hash(
                        tgt_hash, self.backup_group)
                    self.file_helper.create_parent_if_not_exist(tgtpath2)
                    self.file_helper.move_file(bufferpath, tgtpath2)
                    moved_hash = self.file_helper.hash_file(tgtpath2)
                    if tgt_hash == moved_hash:
                        # update BUI with new item and set buffer_status = 1
                        # NOTE(review): rs2["item_id"] is NULL in this branch
                        # (count was 0); the freshly inserted item's id
                        # (cursor.lastrowid) is probably intended -- verify
                        cursor.execute(sql_updatebuitem,
                                       (rs2["item_id"], tgt_hash, new_id))
                        cursor.execute(sql_update_buffer_status,
                                       (1, tgt_hash, self.backup_group))
                        self.log.info({
                            'action':
                            'File Buffered Successfully but in Changed Version',
                            'path': filedata['filepath'],
                            'run_id': self.run_id,
                            'backup_group': self.backup_group,
                            'hash': tgt_hash,
                            'old hash': new_hash,
                            'backup item': new_id
                        })
                    else:
                        # Delete file and update  item bufferstatus -4
                        self.file_helper.delete_file(tgtpath2)
                        cursor.execute(sql_update_buffer_status,
                                       (-4, new_hash, self.backup_group))
                        self.log.info({
                            'action':
                            'Could not Buffer: Changed and Fast Changing',
                            'path': filedata['filepath'],
                            'run_id': self.run_id,
                            'backup_group': self.backup_group,
                            'hash': new_hash,
                            'backup item': new_id
                        })
                else:
                    # BUGFIX: check_buffer_status requires the cursor as its
                    # first argument; it was previously called with only the
                    # hash, so the hash was used as the cursor
                    buffer_status = self.check_buffer_status(cursor, tgt_hash)
                    if buffer_status > 0:
                        # delete target and change bui entry
                        self.file_helper.delete_file(bufferpath)
                        cursor.execute(sql_updatebuitem,
                                       (rs2["item_id"], tgt_hash, new_id))
                        cursor.execute(sql_update_buffer_status,
                                       (1, tgt_hash, self.backup_group))
                        self.log.info({
                            'action':
                            'File Buffered Successfully Changed Version already in Buffer',
                            'path': filedata['filepath'],
                            'run_id': self.run_id,
                            'backup_group': self.backup_group,
                            'hash': tgt_hash,
                            'old hash': new_hash,
                            'backup item': new_id
                        })
                    else:
                        # move target
                        tgtpath2 = self.file_helper.buffer_path_from_hash(
                            tgt_hash, self.backup_group)
                        self.file_helper.create_parent_if_not_exist(tgtpath2)
                        self.file_helper.move_file(bufferpath, tgtpath2)
                        moved_hash = self.file_helper.hash_file(tgtpath2)
                        # validate new target
                        if tgt_hash == moved_hash:
                            cursor.execute(sql_updatebuitem,
                                           (rs2["item_id"], tgt_hash, new_id))
                            self.log.info({
                                'action':
                                'File Buffered Successfully Changed Version in existing Item',
                                'path': filedata['filepath'],
                                'run_id': self.run_id,
                                'backup_group': self.backup_group,
                                'hash': tgt_hash,
                                'old hash': new_hash,
                                'backup item': new_id
                            })
                        else:
                            # Delete target and set buffer status -3
                            self.file_helper.delete_file(tgtpath2)
                            cursor.execute(sql_update_buffer_status,
                                           (-3, new_hash, self.backup_group))
                            self.log.info({
                                'action':
                                'Could not Buffer: Fast Changing in existing item',
                                'path': filedata['filepath'],
                                'run_id': self.run_id,
                                'backup_group': self.backup_group,
                                'hash': new_hash,
                                'backup item': new_id
                            })

    def check_buffer_status(self, cursor, new_hash):
        """Return the BUFFER_STATUS of the ITEMS row for *new_hash* in this
        backup group.

        NOTE(review): assumes a matching row exists -- fetchone() returning
        None would raise TypeError here; verify callers always pass a hash
        already present in ITEMS.
        """
        sql_check_buffer_status = "SELECT BUFFER_STATUS FROM ITEMS I where hash = %s and backupgroup_id = %s"
        # print('[%s | %s]' % (new_hash, self.backup_group))
        cursor.execute(sql_check_buffer_status, (new_hash, self.backup_group))
        rs = cursor.fetchone()
        buffer_status = rs["BUFFER_STATUS"]
        return buffer_status

    def hash_match_or_create_item(self, cursor, filedata, new_id):
        """Hash the file, store the hash on the new BACKUPITEMS row and link
        it to an ITEMS row, creating the ITEMS row when none matches.

        Returns the hash, or None when the file could not be hashed."""
        sql_insertitems = "Insert into ITEMS(backupgroup_id, hash, filesize) VALUES (%s, %s, %s)"
        # set hash and create item where necesarry                            #
        sql_sethash = 'UPDATE BACKUPITEMS SET HASH = %s WHERE id = %s'
        new_hash = self.file_helper.hash_file(filedata['filepath'])
        if new_hash is None:
            self.log.warn({
                'action': 'Could not hash',
                'path': filedata['filepath'],
                'run_id': self.run_id,
                'backup_group': self.backup_group,
            })
            return new_hash
        cursor.execute(sql_sethash, (new_hash, new_id))
        sql_matchwithitems = """
                                     UPDATE BACKUPITEMS t
                                     inner join BACKUPITEMS b
                                     on t.id = b.id
                                     inner join ITEMS i
                                     on i.hash = b.hash
                                     SET b.ITEM_ID = i.id
                                     where b.id = %s and i.backupgroup_id = %s
                                 """
        matched = cursor.execute(sql_matchwithitems,
                                 (new_id, self.backup_group))
        if matched == 0:
            # no existing item for this hash: create one, then retry the match
            cursor.execute(
                sql_insertitems,
                (self.backup_group, new_hash, filedata['size']))
            matched = cursor.execute(sql_matchwithitems,
                                     (new_id, self.backup_group))
        else:
            self.log.info({
                'action': 'File Unchanged',
                'path': filedata['filepath'],
                'run_id': self.run_id,
                'backup_group': self.backup_group,
                'count': matched,
                'hash': new_hash
            })
        return new_hash

    def map_unchganged(self, cursor, filedata, new_id):
        """Copy item_id/hash from the latest hashed BACKUPITEMS row of the
        same file when size and mtime are unchanged.

        Returns (affected_rows, hash-or-None). Name typo kept for
        compatibility with existing callers.
        """
        # check if file is unchanges
        sql_updateunchanged = """
                                           Update BACKUPITEMS t
                                           inner join
                                           BACKUPITEMS as n
                                           on  t.id = n.id
                                           inner join BACKUPITEMS as c
                                           on c.file_id = n.file_id and c.FILESIZE = n.FILESIZE
                                           and c.lastmodified = n.lastmodified
                                           inner join (select max(id) as id from BACKUPITEMS
                                           where file_id =
                                              (Select id from FILES where path_hash = md5(concat(%s, '-', %s)))
                                           and hash is not null) x
                                           on c.id = x.id
                                           SET t.item_id = c.item_id, t.hash=c.hash
                                           where n.id = %s
                                       """
        sql_gethash = "select hash from BACKUPITEMS as b where b.id = %s"
        affected_rows = cursor.execute(
            sql_updateunchanged,
            (self.backup_group, filedata['filepath'], new_id))
        mapped_hash = None
        if affected_rows > 0:
            cursor.execute(sql_gethash, (new_id,))
            rs = cursor.fetchone()
            mapped_hash = rs["hash"]
        return affected_rows, mapped_hash

    def get_basedirs(self, cursor):
        """Return the DIRECTORY rows (base paths) of this backup group."""
        sql_dirs = 'Select PATH from DIRECTORY where BACKUPGROUP_ID = %s'
        # BUGFIX: initialise so a failed query returns [] instead of
        # raising NameError on the final return
        dirs = []
        # ---------------- Get Rlevant Base Dirs
        try:
            cursor.execute(sql_dirs, (self.backup_group,))
            dirs = cursor.fetchall()
        except Exception as e:
            print("Exception")  # sql error
            print(e)
            tb = e.__traceback__
            traceback.print_tb(tb)
        return dirs

    def new_connection(self):
        """Close the current DB connection, open a fresh one and return its
        dict cursor (also stored on self.cursor)."""
        self.db_helper.close(self.db_data)
        self.db_data = self.db_helper.getDictCursor()
        self.cursor = self.db_data["cursor"]
        return self.cursor
Пример #12
0
class DisableAutoLogin(RuleKVEditor):
    '''This class disables Auto Login on the system.

    Compliance requires both the loginwindow autoLoginUser default to be
    absent and /etc/kcpassword (the stored auto-login password) removed.
    '''
    def __init__(self, config, environ, logdispatcher, statechglogger):
        '''
        Constructor
        '''
        RuleKVEditor.__init__(self, config, environ, logdispatcher,
                              statechglogger)
        self.rulenumber = 169
        self.rulename = 'DisableAutoLogin'
        self.formatDetailedResults("initialize")
        # Whitelist-applicable to macOS Catalina (10.15.x) only
        self.applicable = {
            'type': 'white',
            'os': {
                'Mac OS X': ['10.15', 'r', '10.15.10']
            }
        }
        self.mandatory = True
        self.rootrequired = True
        # /etc/kcpassword holds the obfuscated auto-login password; it must
        # be removed for this rule to be compliant
        self.files = {
            "kcpassword": {
                "path": "/etc/kcpassword",
                "remove": True,
                "content": None,
                "permissions": None,
                "owner": None,
                "group": None,
                "eventid": str(self.rulenumber).zfill(4) + "kcpassword"
            }
        }
        self.addKVEditor(
            "DisableAutoLogin", "defaults",
            "/Library/Preferences/com.apple.loginwindow", "", {
                "autoLoginUser": [
                    re.escape(
                        "The domain/default pair of (/Library/Preferences/com.apple.loginwindow, autoLoginUser) does not exist"
                    ), None
                ]
            }, "present", "",
            "This variable is to determine whether or not to " +
            "disable auto login", None, False, {})
        self.fh = FileHelper(self.logdispatch, self.statechglogger)
        self.ch = CommandHelper(self.logdispatch)
        for filelabel, fileinfo in sorted(self.files.items()):
            self.fh.addFile(filelabel, fileinfo["path"], fileinfo["remove"],
                            fileinfo["content"], fileinfo["permissions"],
                            fileinfo["owner"], fileinfo["group"],
                            fileinfo["eventid"])
        self.sethelptext()

    def report(self):
        '''Report on the status of this rule

        Checks both the KV editor (loginwindow default) and the file helper
        (/etc/kcpassword absence); non-compliance of either marks the rule
        non-compliant.

        @return: self.compliant
        @author: Roy Nielsen
        '''
        try:
            self.detailedresults = ""
            self.kvcompliant = False
            self.fhcompliant = False
            self.kvcompliant = RuleKVEditor.report(self)
            if not self.kvcompliant:
                self.detailedresults = "DisableAutoLogin is not compliant!"
            else:
                self.detailedresults = "DisableAutoLogin is compliant!"
            self.fhcompliant = self.fh.evaluateFiles()
            if not self.fhcompliant:
                self.detailedresults = self.detailedresults + "\n" + \
                    self.fh.getFileMessage()
            if not self.fhcompliant or not self.kvcompliant:
                self.compliant = False
        except (KeyboardInterrupt, SystemExit):
            raise
        except Exception as err:
            self.rulesuccess = False
            self.detailedresults = self.detailedresults + "\n" + str(err) + \
                " - " + str(traceback.format_exc())
            self.logdispatch.log(LogPriority.ERROR, self.detailedresults)
        self.formatDetailedResults("report", self.compliant,
                                   self.detailedresults)
        self.logdispatch.log(LogPriority.INFO, self.detailedresults)
        return self.compliant

    def fix(self):
        '''Disables Auto Login

        Runs the KV editor fix first; only when it succeeds are the file
        fixes (kcpassword removal) attempted.

        @return: fixed - True only when both fix stages succeeded
        @author: Roy Nielsen
        '''
        try:
            self.detailedresults = ""
            fixed = False
            self.kvfix = False
            self.fhfix = False
            self.kvfix = RuleKVEditor.fix(self)
            if self.kvfix:
                self.fhfix = self.fh.fixFiles()
                if self.fhfix:
                    self.detailedresults = self.detailedresults + "\n" + \
                        self.fh.getFileMessage()
            # BUGFIX: 'fixed' was never set to True, so fix() always
            # reported failure even when both fix stages succeeded
            fixed = self.kvfix and self.fhfix
        except (KeyboardInterrupt, SystemExit):
            # User initiated exit
            raise
        except Exception as err:
            self.rulesuccess = False
            self.detailedresults = self.detailedresults + "\n" + str(err) + \
                " - " + str(traceback.format_exc())
            self.logdispatch.log(LogPriority.ERROR, self.detailedresults)
        self.formatDetailedResults("fix", fixed, self.detailedresults)
        self.logdispatch.log(LogPriority.INFO, self.detailedresults)
        return fixed
Пример #13
0
    def send_request(self) -> None:
        """Send the request that corresponds to the current state.

        Serialises the appropriate message and writes it to the
        authentication socket (LOGIN) or the metadata socket (all other
        states).  All comparisons now use the ``self.State`` enum class
        consistently: the original mixed in ``self.state.X`` (accessing one
        enum member via another member), which is deprecated since Python
        3.11 and removed in 3.12.
        """
        if self.state == self.State.LOGIN:
            self.authentication_thread.socket.send(
                json.dumps(
                    self.messages.message_login(self.settings.username,
                                                self.settings.password)))

        elif self.state == self.State.CREATE_BUILDING:
            self.metadata_thread.socket.send(
                json.dumps(
                    self.messages.message_create_building("New building")))

        elif self.state == self.State.CREATE_FLOOR_PLAN:
            self.metadata_thread.socket.send(
                json.dumps(
                    self.messages.message_create_floor_plan(
                        self.messages.new_building_id, "New floor plan")))

        elif self.state.name.startswith(self.State.GET_FLOOR_PLANS.name):
            self.metadata_thread.socket.send(
                json.dumps(
                    self.messages.message_get_floor_plans(
                        self.messages.new_building_id)))

        elif self.state == self.State.SET_FLOOR_PLAN_IMAGE:
            self.metadata_thread.socket.send(
                json.dumps(
                    self.messages.message_set_image(
                        FileHelper.read_file_content_as_base64(
                            self.floor_plan_image_file_path))))

        elif self.state == self.State.SET_FLOOR_PLAN_IMAGE_THUMBNAIL:
            self.metadata_thread.socket.send(
                json.dumps(
                    self.messages.message_set_image(
                        FileHelper.read_file_content_as_base64(
                            self.floor_plan_image_thumbnail_file_path))))

        elif self.state == self.State.UPDATE_FLOOR_PLAN:
            self.metadata_thread.socket.send(
                json.dumps(
                    self.messages.message_update_floor_plan(
                        self.messages.new_floor_plan_id,
                        image_id=self.floor_plan_image_id,
                        image_thumbnail_id=self.floor_plan_image_thumbnail_id,
                    )))

        elif self.state == self.State.GET_FLOOR_PLAN_IMAGE:
            self.metadata_thread.socket.send(
                json.dumps(
                    self.messages.message_get_image(self.floor_plan_image_id)))

        elif self.state == self.State.GET_FLOOR_PLAN_IMAGE_THUMBNAIL:
            self.metadata_thread.socket.send(
                json.dumps(
                    self.messages.message_get_image(
                        self.floor_plan_image_thumbnail_id)))

        elif self.state == self.State.DELETE_FLOOR_PLAN:
            self.metadata_thread.socket.send(
                json.dumps(
                    self.messages.message_delete_floor_plan(
                        self.messages.new_floor_plan_id)))

        elif self.state == self.State.DELETE_BUILDING:
            self.metadata_thread.socket.send(
                json.dumps(
                    self.messages.message_delete_building(
                        self.messages.new_building_id)))
Пример #14
0
    def parse_response(self, message: str) -> bool:
        """Parse a server response for the current state machine state.

        Args:
            message (str): raw JSON message received from the backend

        Returns:
            bool: True when the request belonging to this response succeeded
        """
        state = self.state
        State = self.State
        messages = self.messages

        if state == State.LOGIN:
            return messages.parse_login(json.loads(message))

        if state == State.CREATE_BUILDING:
            return messages.parse_create_building(json.loads(message))

        if state == State.CREATE_FLOOR_PLAN:
            return messages.parse_create_floor_plan(json.loads(message))

        if state.name.startswith(State.GET_FLOOR_PLANS.name):
            return messages.parse_get_floor_plans(json.loads(message))

        if state == State.SET_FLOOR_PLAN_IMAGE:
            ok = messages.parse_set_image(json.loads(message))
            if ok:
                # remember the id so the floor plan update can reference it
                self.floor_plan_image_id = messages.image_id
            return ok

        if state == State.SET_FLOOR_PLAN_IMAGE_THUMBNAIL:
            ok = messages.parse_set_image(json.loads(message))
            if ok:
                self.floor_plan_image_thumbnail_id = messages.image_id
            return ok

        if state == State.GET_FLOOR_PLAN_IMAGE:
            ok = messages.parse_get_image(json.loads(message))
            if ok:
                # persist the downloaded image for later inspection
                FileHelper.write_file_content_from_base64(
                    self.temp_floor_plan_image_file_path,
                    messages.image_data_base64,
                )
            return ok

        if state == State.GET_FLOOR_PLAN_IMAGE_THUMBNAIL:
            ok = messages.parse_get_image(json.loads(message))
            if ok:
                FileHelper.write_file_content_from_base64(
                    self.temp_floor_plan_image_thumbnail_file_path,
                    messages.image_data_base64,
                )
            return ok

        if state == State.UPDATE_FLOOR_PLAN:
            return messages.parse_update_floor_plan(json.loads(message))

        if state == State.DELETE_FLOOR_PLAN:
            return messages.parse_delete_floor_plan(json.loads(message))

        if state == State.DELETE_BUILDING:
            return messages.parse_delete_building(json.loads(message))
Пример #15
0
 def save(self):
     """Write the shared high-score table to the configured score file."""
     helper = FileHelper(self.high_scores_file)
     helper.save(HighScores.high_scores)
Пример #16
0
 def load(self):
     """Load the shared high-score table from the configured score file.

     When no saved table exists (the loader returns None), seed a default
     table of ten entries keyed by rank 1-10.
     """
     HighScores.high_scores = FileHelper(self.high_scores_file).load()
     if HighScores.high_scores is None:  # PEP 8: compare to None with 'is'
         keys = [i + 1 for i in range(10)]
         values = [("AAA", 100 * (i + 1)) for i in range(10, 0, -1)]
         # BUGFIX: itertools.izip is Python 2 only (AttributeError on
         # Python 3); the builtin zip is equivalent here on both versions
         HighScores.high_scores = dict(zip(keys, values))
Пример #17
0
def main():
    """Run the character-LSTM pipeline: preprocess, train, generate.

    Every stage is toggled by flags in 'config.json'; preprocessing
    artefacts are cached in the configured checkpoint files.
    """
    config = FileHelper.load_config('config.json')
    seq_length = config['preprocessing']['sequence_chars_length']
    checkpoints = config['preprocessing']['checkpoints']

    # Either rebuild the training data or reload the cached artefacts.
    if config['preprocessing']['exec_preprocessing']:
        X, Y, char_to_int, int_to_char = Preprocessing(config).preprocess()
        FileHelper.save_object_to_file(checkpoints['X_file'], X)
        FileHelper.save_object_to_file(checkpoints['Y_file'], Y)
    else:
        X = FileHelper.load_object_from_file(checkpoints['X_file'])
        Y = FileHelper.load_object_from_file(checkpoints['Y_file'])
        char_to_int = FileHelper.load_object_from_file(
            checkpoints['char2intDict_file'])
        int_to_char = FileHelper.load_object_from_file(
            checkpoints['int2charDict_file'])

    vocabulary = FileHelper.load_object_from_file(
        checkpoints['vocabulary_file'])

    # Keep the flat version of X; the text generator needs it as a seed pool.
    x_flat = X

    # Reshape to the [samples, timesteps, features] layout the LSTM expects.
    X = numpy.reshape(X, (len(X), seq_length, 1))
    # Rescale the integer character codes into the 0-1 range.
    X = X / float(len(vocabulary))
    # One-hot encode the target characters (one category per vocab entry).
    Y = np_utils.to_categorical(Y)

    training = Training(config)
    model = training.define_model(X, Y)

    if config['training']['exec_training']:
        model = training.train(X, Y, char_to_int, vocabulary, model)
    else:
        # Reuse previously trained weights instead of training from scratch.
        model.load_weights(config['training']['load_weights_filename'])
        model.compile(loss='categorical_crossentropy', optimizer='adam')

    if config['generation']['exec_generation']:
        # A random excerpt of the training data seeds the generator.
        seed = generate_random_seed(x_flat)
        generated_text = generate_text(
            config['generation']['text_chars_length'], int_to_char,
            vocabulary, seed, model)

        # Persist the generated text under a timestamped filename.
        output_filename = config['generation']['foldername'] + '/' + \
            datetime.datetime.now().strftime('%Y%m%d_%H_%M_%S') + '.txt'
        FileHelper.write_data(output_filename, generated_text)
Пример #18
0
def main():
    """Restore the files of one backup run to their original locations.

    Configuration comes from BMU_* environment variables (run id, path
    search/replace rewriting, selection by path prefix). Each selected file
    is located in the content-addressed store via its hash and copied back;
    files already present at the target are skipped. Copy failures are
    collected and logged at the end.
    """
    # get db cursor
    db_helper = DBHelper()
    file_helper = FileHelper()
    log_helper = LogHelper()
    log = log_helper.getLogger()
    db_data = db_helper.getDictCursor()
    cursor = db_data["cursor"]

    log.info({
        'action': 'Restore started',
        'BMU_PATH_SEARCH': os.getenv('BMU_PATH_SEARCH'),
        'BMU_PATH_REPLACE': os.getenv('BMU_PATH_REPLACE'),
        'BMU_PATH_RUNID': os.getenv('BMU_PATH_RUNID'),
        'BMU_PATH_DELIM': os.getenv('BMU_PATH_DELIM'),
        'BMU_PATH_DEPTH': os.getenv('BMU_PATH_DEPTH'),
        'BMU_PATH_SELECT': os.getenv('BMU_PATH_SELECT')
    })

    # NOTE(review): the query is assembled by string interpolation from
    # environment variables. Tolerable for an operator-run tool, but switch
    # to parameterized queries if these inputs can ever be untrusted.
    sql = """
        select REPLACE(PATH, '%s', '%s') AS PATH, d.NAME as DRIVE, FILESIZE, i.HASH from BACKUPITEMS b
        inner join ITEMS i
        on b.item_id = i.id
        inner join DRIVES d
        on COALESCE(DRIVE1_ID, DRIVE2_ID) = d.ID
        where b.run_id = %s
        and SUBSTRING_INDEX(path,'%s',%s) = '%s'
        order by COALESCE(DRIVE1_ID, DRIVE2_ID) asc, filesize desc
    """ % (os.getenv('BMU_PATH_SEARCH'), os.getenv('BMU_PATH_REPLACE'),
           os.getenv('BMU_PATH_RUNID'), os.getenv('BMU_PATH_DELIM'),
           os.getenv('BMU_PATH_DEPTH'), os.getenv('BMU_PATH_SELECT'))
    print(sql)
    cursor.execute(sql)
    files_to_restore = cursor.fetchall()

    count = 0
    error_list = []
    for file_to_restore in files_to_restore:
        # print(file_to_restore)
        unescaped_path = file_to_restore['PATH'].replace('\\\\', '\\')
        # dirty hack: adds second backslash if path starts with backslash
        if str.startswith(unescaped_path, '\\'):
            unescaped_path = '\\' + unescaped_path
        file_to_restore['PATH'] = unescaped_path
        tgt = file_to_restore['PATH']
        src = file_helper.path_from_hash(os.getenv('BMU_INT_ROOT'),
                                         file_to_restore['DRIVE'],
                                         file_to_restore['HASH'])
        if not file_helper.file_exists(tgt):
            # Block until the operator makes the missing source available
            # (e.g. by mounting the external drive).
            while not file_helper.file_exists(src):
                print("Missing: " + src)
                input("Press Enter to continue...")
            if file_helper.file_exists(src):
                try:
                    file_helper.create_parent_if_not_exist(tgt)
                    file_helper.copy_file(src, tgt)
                except Exception as e:
                    print("Exception")  # sql error
                    print(e)
                    tb = e.__traceback__
                    traceback.print_tb(tb)
                    error_list.append({
                        "source": src,
                        "target": tgt,
                        "exception": str(e)
                    })
                count += 1
                print(tgt + " sucessfully restored [" + str(count) + "]")
        else:
            print(tgt + "allready exists, skipping")
        # Progress log roughly every 1000 restored files. The count > 0 guard
        # fixes the original behaviour where count == 0 satisfied
        # `count % 1000 == 0` and flooded the log on every skipped file.
        if count > 0 and count % 1000 == 0:
            log.info({
                'action': 'Restore progress',
                'BMU_PATH_SELECT': os.getenv('BMU_PATH_SELECT'),
                'BMU_PATH_RUNID': os.getenv('BMU_PATH_RUNID'),
                'count': count,
                'total': len(files_to_restore)
            })

    log.info({
        'action': 'Restore finished',
        'BMU_PATH_SEARCH': os.getenv('BMU_PATH_SEARCH'),
        'BMU_PATH_REPLACE': os.getenv('BMU_PATH_REPLACE'),
        'BMU_PATH_RUNID': os.getenv('BMU_PATH_RUNID'),
        'BMU_PATH_DELIM': os.getenv('BMU_PATH_DELIM'),
        'BMU_PATH_DEPTH': os.getenv('BMU_PATH_DEPTH'),
        'BMU_PATH_SELECT': os.getenv('BMU_PATH_SELECT'),
        'count': count,
        'errors': error_list
    })
Пример #19
0
class BackupFiles:
    drivepathinternal = os.getenv('BMU_INT_ROOT')
    drivepathexternal = os.getenv('BMU_EXT_ROOT')
    log_helper = LogHelper()
    log = log_helper.getLogger()
    db_helper = DBHelper()
    db_data = db_helper.getDictCursor()
    cursor = db_data["cursor"]
    file_helper = FileHelper()

    def __init__(self):
        # All collaborators (logger, DB cursor, file helper, drive roots) are
        # shared class-level attributes; nothing to initialise per instance.
        pass

    def backup_files(self, backupgroup_id, external):
        """Copy buffered files of one backup group onto its backup drive.

        Every item of the group that is buffered but not yet tracked for the
        selected drive (internal or external) is copied from the buffer to a
        content-addressed path on the drive and verified by re-hashing.

        Per-item DB tracking (via mark_item): the drive id on success, -9 on
        copy failure, -1 when the source changed during the copy, -2 when the
        buffered copy does not reproduce its recorded hash.

        Returns the drive id when the run stopped or skipped files because of
        missing space (so the caller can mark the drive full), otherwise 0.
        """
        logger = self.log
        filehelper = self.file_helper
        # Internal and external drives are mounted under different roots.
        if external:
            drivepath = self.drivepathexternal
        else:
            drivepath = self.drivepathinternal

        drive_info = self.get_drive(backupgroup_id, external)

        logger.info({
            'action': 'Starting Backuping Files',
            'backup_group': backupgroup_id,
            'external': external,
            'Drive Info': drive_info
        })

        free_disk_space, free_quota = self.get_free_space(
            drive_info, drivepath)
        logger.info({
            'action': 'Free Space',
            'backup_group': backupgroup_id,
            'external': external,
            'Drive Info': drive_info,
            'free_quota': free_quota,
            'free_space': free_disk_space
        })

        # No physical space or no logical quota left: report the drive full.
        if free_disk_space <= 0 or free_quota <= 0:
            logger.warn({
                'action': 'Disk Full, Aborting',
                'backup_group': backupgroup_id,
                'external': external,
                'Drive Info': drive_info,
                'free_quota': free_quota,
                'free_space': free_disk_space
            })
            return drive_info["id"]
        files_to_save = self.get_filestosave(backupgroup_id, external)
        total_files = len(files_to_save)
        files_saved = 0
        logger.info({
            'action': 'Files To backup',
            'backup_group': backupgroup_id,
            'external': external,
            'files_to_backup': total_files
        })
        skip_big = 0
        for file_to_save in files_to_save:
            # # temporaray code for testing
            #
            # if file_to_save["filesize"] > 5000000000:
            #    logger.info("Skipping File to big because of temporary file Size limit 5GB : %s" % file_to_save)
            #    continue
            # # End of Temporary Code
            # Skip (but count) files that no longer fit into disk or quota.
            if free_disk_space < file_to_save[
                    "filesize"] or free_quota < file_to_save["filesize"]:
                logger.info({
                    'action': 'Skipping File to big for remaining Space',
                    'backup_group': backupgroup_id,
                    'external': external,
                    'file_to_backup': file_to_save
                })
                skip_big += 1
                continue
            # Target path is derived from the content hash; the source sits
            # in the staging buffer.
            target = filehelper.path_from_hash(drivepath, drive_info["name"],
                                               file_to_save["hash"])
            source = filehelper.buffer_path_from_hash(file_to_save["hash"],
                                                      backupgroup_id)

            logger.info({
                'action': 'Copying File',
                'backup_group': backupgroup_id,
                'external': external,
                'file_to_backup': file_to_save
            })
            if not filehelper.copy_file(source, target):
                logger.error({
                    'action': 'Copying File',
                    'backup_group': backupgroup_id,
                    'external': external,
                    'file_to_backup': file_to_save,
                    'source': source,
                    'target': target
                })
                self.mark_item(backupgroup_id, file_to_save["hash"], external,
                               -9)
                continue
            # Verify the copy by hashing the target file again.
            hash_tgt = filehelper.hash_file(target)
            if hash_tgt != file_to_save["hash"]:
                logger.error({
                    'action': 'Hash not Matching',
                    'backup_group': backupgroup_id,
                    'external': external,
                    'file_to_backup': file_to_save,
                    'hash_target': hash_tgt,
                    'target': target
                })
                # Re-hash the source to distinguish "source changed while
                # copying" (-1) from "buffered copy is corrupt" (-2).
                hash_src_new = filehelper.hash_file(source)
                if file_to_save["hash"] == hash_src_new:
                    filehelper.delete_file(target)
                    self.mark_item(backupgroup_id, file_to_save["hash"],
                                   external, -1)
                    logger.error(
                        "File changed during copying from buffer %s : %s != %s"
                        % (target, hash_tgt, hash_src_new))
                    logger.error({
                        'action': 'File changed during copying from buffer',
                        'backup_group': backupgroup_id,
                        'external': external,
                        'file_to_backup': file_to_save,
                        'hash_target': hash_tgt,
                        'target': target,
                        'hash_src_new': hash_src_new
                    })
                    continue
                else:
                    filehelper.delete_file(target)
                    self.mark_item(backupgroup_id, file_to_save["hash"],
                                   external, -2)
                    logger.error({
                        'action':
                        'Buffered File does not produce correct hash',
                        'backup_group': backupgroup_id,
                        'external': external,
                        'file_to_backup': file_to_save,
                        'hash_target': hash_tgt,
                        'target': target,
                        'hash_src_new': hash_src_new
                    })
                    continue
            else:
                self.mark_item(backupgroup_id, file_to_save["hash"], external,
                               drive_info["id"])
                logger.info({
                    'action': 'Backup File Successful',
                    'backup_group': backupgroup_id,
                    'external': external,
                    'file_to_backup': file_to_save,
                    'hash_target': hash_tgt,
                    'target': target
                })
                files_saved += 1

            # Quota is tracked arithmetically; disk space is re-measured.
            free_quota = free_quota - file_to_save["filesize"]
            free_disk_space = filehelper.freespace(drivepath)
            logger.info({
                'action': 'Remaining Free Space',
                'backup_group': backupgroup_id,
                'external': external,
                'Drive Info': drive_info,
                'free_quota': free_quota,
                'free_space': free_disk_space
            })
        logger.info({
            'action': 'Finished Backup',
            'backup_group': backupgroup_id,
            'external': external,
            'Drive Info': drive_info,
            'free_quota': free_quota,
            'free_space': free_disk_space,
            'Files_To_Save': total_files,
            'Files_Saved': files_saved
        })
        # Any file skipped for size means the drive should be treated as full.
        if skip_big > 0:
            return drive_info["id"]
        else:
            return 0

    def get_filestosave(self, backupgroup_id: int, external: bool) -> list:
        """Return the buffered items of a group not yet saved to this drive.

        The drive column checked (DRIVE1_ID or DRIVE2_ID) depends on
        ``external``. Returns the fetched rows, largest files first; returns
        an empty list on a database error (the original implicitly returned
        None, which crashed callers calling len() on the result).
        """
        cursor = self.cursor
        tracking_field = 'DRIVE1_ID'
        if external:
            tracking_field = 'DRIVE2_ID'
        # The column name must be interpolated (identifiers cannot be bound
        # parameters); the group id is passed as a bound parameter.
        sql_getfilesforrun = """
        Select i.id as item_id, i.hash as hash,
            i.filesize as filesize,
            i.drive1_id as drive1_id, i.drive2_id as drive2_id, i.buffer_status
            from ITEMS i
            where (i.%s is null or i.%s = 0)
            and i.buffer_status = 1
            and i.backupgroup_id = %%s
            order by filesize desc
        """ % (tracking_field, tracking_field)

        try:
            cursor.execute(sql_getfilesforrun, (backupgroup_id,))
            return cursor.fetchall()

        except Exception as e:
            print("Exception")  # sql error
            print(e)
            traceback.print_tb(e.__traceback__)
            return []

    def get_drive(self, backupgroup_id, external):
        """Return the first non-full drive of a group for the given side.

        ``external`` selects between internal and external drives. Returns
        the fetched row dict, or {} on a database error.
        """
        cursor = self.cursor
        # Values are bound parameters instead of being interpolated into the
        # SQL string (consistent with close_finished_runs).
        sql_getdrive = """SELECT id, name, drivefull, extern, maxsize, drive_id, group_id FROM DRIVES d
            inner join DRIVES_GROUPS dg
            on d.id = dg.drive_id
            where group_id = %s and drivefull = false and extern = %s limit 1
        """

        try:
            cursor.execute(sql_getdrive, (backupgroup_id, external))
            return cursor.fetchone()

        except Exception as e:
            print("Exception")  # sql error
            print(e)
            traceback.print_tb(e.__traceback__)
            return {}

    def get_free_space(self, drive_info: dict, drivepath: str):
        """Return (physical free disk space, remaining logical quota).

        The quota is the drive's configured maxsize minus the deduplicated
        (max filesize per hash) space already used by items stored on this
        drive. On a database error the quota is reported as 0 so callers
        abort rather than overfill the drive.
        """
        filehelper = self.file_helper
        cursor = self.cursor
        disk = filehelper.freespace(drivepath)
        # Values are bound parameters instead of interpolated into the SQL.
        sql_getusedspace = """
        select sum(size) size from (
        select max(filesize) as size, i.hash  from ITEMS i
        where
        i.backupgroup_id = %s and (i.DRIVE1_ID = %s or i.DRIVE2_ID = %s)
        group by i.hash) x
        """

        try:
            cursor.execute(sql_getusedspace,
                           (drive_info["group_id"], drive_info["id"],
                            drive_info["id"]))
            result = cursor.fetchone()
            # SUM() over zero rows yields NULL -> nothing used yet.
            if result["size"] is None:
                logical = int(drive_info["maxsize"])
            else:
                logical = int(drive_info["maxsize"]) - int(result["size"])
            return disk, logical

        except Exception as e:
            print("Exception")  # sql error
            print(e)
            traceback.print_tb(e.__traceback__)
            return disk, 0

    def mark_item(self, bg_id, hash, external, status):
        """Set the drive tracking status of all items with this hash.

        ``status`` is the drive id on success or a negative error code
        (-9 / -1 / -2, see backup_files). ``external`` selects which drive
        column (DRIVE1_ID or DRIVE2_ID) is written.
        """
        tracking_field = 'DRIVE1_ID'
        if external:
            tracking_field = 'DRIVE2_ID'
        cursor = self.cursor
        # Only the column name is interpolated; the values (including the
        # externally supplied hash) are bound parameters to avoid SQL
        # injection and quoting bugs.
        sql_updateitem = ('update ITEMS i set %s = %%s '
                          'where backupgroup_id= %%s and hash = %%s') % tracking_field

        try:
            cursor.execute(sql_updateitem, (status, bg_id, hash))

        except Exception as e:
            print("Exception")  # sql error
            print(e)
            traceback.print_tb(e.__traceback__)

    def is_hash_known(self, hash, backup_group):
        """Return the ITEMS id of this hash within a group, or 0 if unknown.

        0 is also returned on a database error, so callers treat failures
        like an unknown hash.
        """
        cursor = self.cursor
        # Bound parameters instead of hand-quoted string interpolation.
        sql_selectitem = ('select id from ITEMS '
                          'where backupgroup_id = %s and hash = %s')

        try:
            cursor.execute(sql_selectitem, (backup_group, hash))
            data = cursor.fetchall()
            if not data:
                return 0
            return data[0]["id"]

        except Exception as e:
            print("Exception")  # sql error
            print(e)
            traceback.print_tb(e.__traceback__)
            return 0

    def change_item_in_bui(self, bui_id, item_id, hash):
        """Repoint one BACKUPITEMS row at a different item/hash.

        Used to rewire a backup item to an already-known ITEMS entry.
        """
        cursor = self.cursor
        # Bound parameters instead of string interpolation; the printed
        # statement is therefore the template, not the substituted query.
        sql_updatebuitem = ('update BACKUPITEMS  set item_id  = %s, '
                            'hash = %s where id = %s ')
        print(sql_updatebuitem)

        try:
            cursor.execute(sql_updatebuitem, (item_id, hash, bui_id))

        except Exception as e:
            print("Exception")  # sql error
            print(e)
            traceback.print_tb(e.__traceback__)

    def create_item(self, bg_id, hash, external, status, size):
        """Insert a new ITEMS row for a freshly seen hash.

        For internal drives the status goes into DRIVE1_ID. For external
        drives the status goes into DRIVE2_ID while DRIVE1_ID is set to the
        sentinel -12 (item never buffered for the internal side).
        """
        # Values are bound parameters; only the column layout differs per
        # branch, so each branch has its own statement.
        if external:
            sql_insertitem = (
                'insert into ITEMS (backupgroup_id, hash, DRIVE1_ID, '
                'DRIVE2_ID, filesize) values (%s, %s, -12, %s, %s)')
        else:
            sql_insertitem = (
                'insert into ITEMS (backupgroup_id, hash, DRIVE1_ID, '
                'filesize) values (%s, %s, %s, %s)')
        cursor = self.cursor

        try:
            cursor.execute(sql_insertitem, (bg_id, hash, status, size))

        except Exception as e:
            print("Exception")  # sql error
            print(e)
            traceback.print_tb(e.__traceback__)

    def close_finished_runs(self):
        """Mark backup runs whose items are all processed as saved.

        A run is "finished" when none of its items still has a NULL/zero
        drive id on either side. For each such run, ALL_SAVED is set to 1 and
        ERRORS_SAVING records how many of its items ended with a negative
        (error) drive id.
        """
        # Outer query: not-yet-closed runs that have no unprocessed items;
        # the joined subquery counts items that failed (negative drive id).
        sql_get_finished = """
        Select id, coalesce(x.count, 0) as count from RUNS r
        LEFT OUTER JOIN (
            Select run_id, count(*) as count
            from BACKUPITEMS b
            inner join ITEMS i
            on (b.item_id = i.id)
            where i.DRIVE1_ID < 0 or i.DRIVE2_ID < 0
            group by run_id
        ) x
        on r.id = x.run_id
        where
        (ALL_SAVED IS NULL or ALL_SAVED = 0)
        and
        id not in (
            Select distinct b.run_id as run_id
            from BACKUPITEMS b
            inner join ITEMS i
            on (b.item_id = i.id)
            where ((i.DRIVE1_ID is null or i.DRIVE1_ID = 0) or (i.DRIVE2_ID is null or i.DRIVE2_ID = 0)) )
        """
        sql_update_run = "UPDATE RUNS SET ALL_SAVED = 1, ERRORS_SAVING = %s where ID = %s"

        cursor = self.cursor

        try:
            cursor.execute(sql_get_finished)
            runs = cursor.fetchall()
            logger = self.log
            for run in runs:
                cursor.execute(sql_update_run, (run["count"], run["id"]))
                logger.info("Saved Run %s with %s Errors" %
                            (run["id"], run["count"]))
                logger.info({
                    'action': 'Saved Runs',
                    'run_id': run["id"],
                    'Errors': run["count"]
                })

        except Exception as e:
            print("Exception")  # sql error
            print(e)
            tb = e.__traceback__
            traceback.print_tb(tb)

    def cleanupBuffer(self):
        """Evict fully backed-up files from the buffer until usage <= 80%.

        Only items already stored on both drives (DRIVE1_ID and DRIVE2_ID
        positive) with buffer_status = 1 are eligible; their status is
        advanced to 2 once removed from the buffer.
        """
        fh = FileHelper()
        dbh = DBHelper()
        logger = self.log

        sql_savedbuffer = "select * from ITEMS where (DRIVE1_ID > 0  and DRIVE2_ID > 0) and buffer_status = 1 order by id "
        sql_updatebufferstatus = "UPDATE ITEMS SET BUFFER_STATUS = 2 WHERE ID = %s"
        usage = fh.bufferusage()
        print(usage)

        try:
            db = dbh.getDictCursor()
            cursor = db["cursor"]
            cursor.execute(sql_savedbuffer)
            result = cursor.fetchall()

            for entry in result:
                # Stop once the buffer is back under the 80% watermark.
                if usage <= 0.8:
                    break
                fh.removefrombuffer(entry["HASH"], entry["BACKUPGROUP_ID"])
                usage = fh.bufferusage()
                # Query args must be a sequence: the original passed the bare
                # scalar (entry["ID"]) and relied on driver leniency.
                cursor.execute(sql_updatebufferstatus, (entry["ID"],))
                print("removed %s from buffer for BG %s " %
                      (entry["HASH"], entry["BACKUPGROUP_ID"]))
                print(usage)
                logger.info({
                    'action': 'Removed from Buffer',
                    'hash': entry["HASH"],
                    'bachup_group': entry["BACKUPGROUP_ID"],
                    "size": entry["FILESIZE"]
                })

        except Exception as e:
            print("Exception")  # sql error
            print(e)
            traceback.print_tb(e.__traceback__)

    def set_drive_full(self, id):
        """Flag the drive with the given id as full so it is skipped later."""
        cursor = self.cursor
        # The id is a bound parameter instead of being interpolated.
        sql_updateitem = 'update DRIVES set drivefull = 1 where id=%s '

        try:
            cursor.execute(sql_updateitem, (id,))

        except Exception as e:
            print("Exception")  # sql error
            print(e)
            traceback.print_tb(e.__traceback__)
    def send_request(self) -> None:
        """Send the request matching the current state-machine state.

        LOGIN goes over the authentication connection; all building /
        floor-plan / area operations go over the metadata connection. Each
        branch serialises the corresponding message to JSON before sending.
        """
        if self.state == self.State.LOGIN:
            self.authentication_thread.socket.send(
                json.dumps(
                    self.messages.message_login(
                        self.settings.username, self.settings.password
                    )
                )
            )

        elif self.state == self.State.CREATE_BUILDING:
            self.metadata_thread.socket.send(
                json.dumps(self.messages.message_create_building("New building"))
            )

        elif self.state == self.State.CREATE_FLOOR_PLAN:
            self.metadata_thread.socket.send(
                json.dumps(
                    self.messages.message_create_floor_plan(
                        self.messages.new_building_id, "New floor plan"
                    )
                )
            )

        # NOTE(review): this and several later branches use self.state.X
        # (attribute access through an enum *member*) instead of
        # self.State.X; that works on older Python enums but is inconsistent
        # and deprecated/removed in newer Python — confirm and unify.
        elif self.state == self.state.SET_FLOOR_PLAN_IMAGE:
            self.metadata_thread.socket.send(
                json.dumps(
                    self.messages.message_set_image(
                        FileHelper.read_file_content_as_base64(
                            self.floor_plan_image_file_path
                        )
                    )
                )
            )

        elif self.state == self.state.SET_FLOOR_PLAN_IMAGE_THUMBNAIL:
            self.metadata_thread.socket.send(
                json.dumps(
                    self.messages.message_set_image(
                        FileHelper.read_file_content_as_base64(
                            self.floor_plan_image_thumbnail_file_path
                        )
                    )
                )
            )

        elif self.state == self.State.UPDATE_FLOOR_PLAN:
            # Anchors the floor plan image to geographic coordinates: the four
            # corner lat/lon/alt triples plus their normalised image
            # coordinates, a measured reference distance, and image size.
            self.metadata_thread.socket.send(
                json.dumps(
                    self.messages.message_update_floor_plan(
                        self.messages.new_floor_plan_id,
                        image_id=self.floor_plan_image_id,
                        image_thumbnail_id=self.floor_plan_image_thumbnail_id,
                        latitude_lefttop=61.454823,
                        longitude_lefttop=23.884526,
                        altitude_lefttop=0,
                        x_normcoord_lefttop=0.0748329808357999,
                        y_normcoord_lefttop=0.203506328386351,
                        latitude_righttop=61.454773,
                        longitude_righttop=23.886096,
                        altitude_righttop=0,
                        x_normcoord_righttop=0.903860782456575,
                        y_normcoord_righttop=0.203571943827163,
                        latitude_leftbottom=61.454612,
                        longitude_leftbottom=23.884503,
                        altitude_leftbottom=0,
                        x_normcoord_leftbottom=0.0747559429065484,
                        y_normcoord_leftbottom=0.780014805319742,
                        latitude_rightbottom=61.454562,
                        longitude_rightbottom=23.88607,
                        altitude_rightbottom=0,
                        x_normcoord_rightbottom=0.904069882566427,
                        y_normcoord_rightbottom=0.78039444527477,
                        x_distance_point1=0.450065006833406,
                        y_distance_point1=0.203192686229106,
                        x_distance_point2=0.449649314572983,
                        y_distance_point2=0.780260953915855,
                        distance_in_m=25.1,
                        level=0,
                        image_width=self.floor_plan_image_width,
                        image_height=self.floor_plan_image_height,
                    )
                )
            )

        elif self.state == self.state.CREATE_AREA:
            self.metadata_thread.socket.send(
                json.dumps(
                    self.messages.message_create_area(
                        self.messages.new_floor_plan_id, "new area"
                    )
                )
            )

        elif self.state == self.state.UPDATE_AREA:
            # Polygon outline of the area as lat/lon/alt corner points.
            corner_point_llas = [
                dict(latitude=61.454674940345, longitude=23.8857998957377, altitude=0),
                dict(latitude=61.4546694661163, longitude=23.8859745817991, altitude=0),
                dict(latitude=61.4545636984609, longitude=23.8859602235394, altitude=0),
                dict(latitude=61.4545691231503, longitude=23.8857853265751, altitude=0),
            ]

            self.metadata_thread.socket.send(
                json.dumps(
                    self.messages.message_update_area(
                        self.messages.new_area_id,
                        self.messages.new_floor_plan_id,
                        "updated name",
                        80,
                        0,
                        0,
                        200,
                        corner_point_llas,
                    )
                )
            )

        elif self.state == self.State.GET_AREAS:
            self.metadata_thread.socket.send(
                json.dumps(
                    self.messages.message_get_floor_plan_areas(
                        self.messages.new_floor_plan_id
                    )
                )
            )

        elif self.state == self.State.DELETE_AREA:
            self.metadata_thread.socket.send(
                json.dumps(
                    self.messages.message_delete_area(
                        self.messages.new_area_id, self.messages.new_floor_plan_id
                    )
                )
            )

        elif self.state == self.State.DELETE_FLOOR_PLAN:
            self.metadata_thread.socket.send(
                json.dumps(
                    self.messages.message_delete_floor_plan(
                        self.messages.new_floor_plan_id
                    )
                )
            )

        elif self.state == self.State.DELETE_BUILDING:
            self.metadata_thread.socket.send(
                json.dumps(
                    self.messages.message_delete_building(self.messages.new_building_id)
                )
            )
            # NOTE(review): the rest of this method references names that are
            # not defined here (statemachine, sendmsg, msg, sock, client,
            # filehelper) and would raise NameError if executed; it appears
            # to be a fragment of an unrelated UDP request handler merged in
            # by mistake. Confirm against the original source and remove.
            statemachine.on_event({'event': 'msg_sent', 'msg': sendmsg})
            return (client, sendmsg)
        # NOTE(review): unreachable-by-design tail of the foreign fragment;
        # presumably parses a segment message "s<segnum>:<payload>" and ACKs
        # it — verify before reuse ("ACK:s" + segnum would also TypeError on
        # an int segnum).
        segnum = int(
            msg[1:2])  # struct of our segment msg = s<segnum>:<payload>
        payload = msg[3:]
        filehelper.writetofile(payload)
        sendmsg = "ACK:s" + segnum
        sock.sendto(sendmsg.encode(), client)
        statemachine.on_event({'event': 'msg_sent', 'msg': sendmsg})
        return (client, sendmsg)


# TODO: add a state where if an error happens that state machine
#       cant handle, the error state will log that error

filehelper = FileHelper()

# Non-blocking UDP server socket for the file-transfer protocol.
server_socket = socket(AF_INET, SOCK_DGRAM)
server_socket.bind(server_addr)
server_socket.setblocking(False)

# select() bookkeeping: watch the server socket for readability and errors.
read_set = {server_socket}
write_set = set()
error_set = {server_socket}

statemachine = ServerStateMachine()
# Dispatch table mapping state-machine state names to handler callables.
statehandler = {}
statehandler['IdleState'] = idle_handler
statehandler['WaitState'] = wait_handler
statehandler['GetState'] = get_handler
statehandler['PutState'] = put_handler