def __init__(self) -> None:
    """Initialise the David component from its JSON configuration."""

    # STEP 0: Resolve the config-file path from the enum and load it
    self.__enum = en.David
    self.__cf = Conny()
    self.__cf.load(self.__enum.value)

    # STEP 1: Private flags read straight from the config
    self.__bAllowTesting = self.__cf.data["parameters"]["allow testing"]

    # STEP 2: Public flags read straight from the config
    self.bShowOutput = self.__cf.data["parameters"]["show output"]

    return
    def __init__(self):
        """Load the SpongeBob configuration and cache its flags."""

        # STEP 0: Fetch and parse this component's config file
        self.__enum = en.SpongeBob
        self.__config = Conny()
        self.__config.load(self.__enum.value)

        # STEP 1: Private flags - defaults come from the config file
        dParams = self.__config.data["parameters"]
        self.__bAllowTesting = dParams["allow testing"]["default"]

        # STEP 2: Public flags - defaults come from the config file
        self.bShowOutput = dParams["show output"]["default"]

        return
Beispiel #3
0
    def __init__(self):
        """Load the Heimi configuration file for this instance."""

        # The enum value is the path of the component's config file
        self.__enum = en.Heimi
        self.__cf = Conny()
        self.__cf.load(self.__enum.value)

        return
Beispiel #4
0
    def getScalars(self) -> list:
        """Return the "scalars" section of every surrogate's config file.

        Returns:
            list: one scalars dictionary per surrogate, in the order
            produced by self.getSurrogates().
        """

        # Load each surrogate's config; the file path lives in value[2]
        lOut = []
        for eSurrogate in self.getSurrogates():
            cfTmp = Conny()
            cfTmp.load(eSurrogate.value[2])
            lOut.append(cfTmp.data["scalars"])

        return lOut

    #
    #   endregion

    #
    #endregion

#
#endregion
Beispiel #5
0
    def getUserInput(self, _eClass: Enum, **kwargs) -> int:
        """Show the menu belonging to *_eClass* and return the choice.

        Args:
            _eClass: an enum accepted by en.isClass; its value is the
                path of the config file that holds the menu.
            **kwargs: forwarded to Conny.getMenu; "breakOnInvalid"
                (bool) is additionally passed through to the UI loop.

        Raises:
            Exception: if *_eClass* is not a recognised enum class.
        """

        # Config-file handle (created first, loaded after validation)
        cfTmp = Conny()

        # Reject enums this helper does not know how to handle
        if (en.isClass(_eClass) == False):
            raise Exception(
                "An error occured in Rae.getUserInput -> Step 2: Invalid enum passed"
            )

        # Load the config file the enum points at and pull the menu
        cfTmp.load(_eClass.value)
        lMenu = cfTmp.getMenu(kwargs)

        # Delegate the prompt loop; honour breakOnInvalid when supplied
        if ("breakOnInvalid" in kwargs):
            return self.__getUI(lMenu, kwargs["breakOnInvalid"])

        return self.__getUI(lMenu, False)
Beispiel #6
0
    def getScalars(self) -> list:
        """Return the "scalars" dictionary from every GA's config file.

        Returns:
            list: one scalars dictionary per genetic algorithm, in the
            order produced by self.getGAs().
        """

        # One config load per GA; the file path lives in value[2]
        lOut = []
        for eGA in self.getGAs():
            cfTmp = Conny()
            cfTmp.load(eGA.value[2], isGA=True)
            lOut.append(cfTmp.data["scalars"])

        return lOut

    #
    #   endregion

    #
    #endregion


#
#endregion
    def __init__(self):
        """Initialise King from its config file and cache its flags."""

        # STEP 0: Resolve and load the component configuration
        self.__enum = en.King
        self.__cf = Conny()
        self.__cf.load(self.__enum.value)

        # STEP 1: Private flags - defaults come from the config file
        dParams = self.__cf.data["parameters"]
        self.__bAllowTesting = dParams["allow testing"]["default"]

        # STEP 2: Public flags - defaults come from the config file
        self.bShowOutput = dParams["show output"]["default"]

        return

    #
    #endregion

    #region Front-End

    #
    #endregion

    #region Back-End

    #
    #endregion


#
#endregion
Beispiel #8
0
    def __init__(self):
        """Load the Rae configuration file."""

        # The enum value is the path of this component's config file
        self.__enum = en.Rae
        self.__cf = Conny()
        self.__cf.load(self.__enum.value)

        return
    def __getParams(self) -> list:
        """Prompt the user for each antenna parameter and collect them.

        Iterates over the "params" menu in this component's config file
        ("items" holds the entry count; entries are keyed "0".."n-1").
        Menu indices 2 and 3 are special-cased as the substrate
        permitivitty and substrate height; every other entry is read
        from the console as a float (in cm), re-prompting on bad input.

        Returns:
            list: one value per menu entry, in menu order.
        """

        #   STEP 0: Local variables
        cfTmp = Conny()

        lMenu = []
        lOut = []

        #   STEP 1: Setup - Local variables
        cfTmp.load(self.__enum.value)

        lMenu = cfTmp.data["menus"]["params"]

        #   STEP 2: Iterate through params
        for i in range(0, lMenu["items"]):
            #   STEP 3: Check if substrate
            #   NOTE(review): menu indices 2 and 3 are assumed to be the
            #   substrate entries - confirm against the config file.
            if (i == 2):
                lOut.append(self.__getSubstrate())

            elif (i == 3):
                lOut.append(self.__getSubstrateHeight())

            else:
                print("Irene (NG) {" + Helga.time() + "} - Please define (" +
                      lMenu[str(i)] + ") {cm}")

                #   STEP 4: User input - loop until the value parses
                while (True):
                    #   STEP 5: Wait for input
                    ui = input("\t>")
                    os.system("cls")

                    #   STEP 6: Verify input
                    try:
                        #   STEP 7: Cast to float
                        ui = float(ui)

                        lOut.append(ui)
                        break

                    except:
                        print("Irene (NG) {" + Helga.time() +
                              "} - Invalid Input")

        return lOut
    def __getFreq(self) -> list:
        """Prompt the user for every frequency-menu entry.

        Each entry of the "frequency" menu in this component's config
        file ("items" = entry count, entries keyed "0".."n-1") is read
        from the console as a float; invalid input is re-prompted.

        Returns:
            list: the floats entered, in menu order.
        """

        #   STEP 0: Local variables
        cfTmp = Conny()

        lMenu = []
        lOut = []

        #   STEP 1: Setup - Local variables
        cfTmp.load(self.__enum.value)

        lMenu = cfTmp.data["menus"]["frequency"]

        #   STEP 2: Get frequency menu
        for i in range(0, lMenu["items"]):
            #   STEP 3: User output
            print("Irene (NG) {" + Helga.time() + "} - Please define (" +
                  lMenu[str(i)] + ")")

            #   STEP 4: user input - loop until the value parses
            while (True):
                #   STEP 5: Wait for input
                ui = input("\t>")
                os.system("cls")

                #   STEP 6: Verify input
                try:
                    #   STEP 7: Cast to float
                    ui = float(ui)

                    lOut.append(ui)
                    break

                except:
                    print("Irene (NG) {" + Helga.time() + "} - Invalid Input")

        #   STEP 8: Return
        return lOut
Beispiel #11
0
    def getActiveParameters(self) -> list:
        """Return the "parameters" section for every ACTIVE surrogate.

        A surrogate is active when value[1] of its enum entry is True;
        value[2] is the path of its config file.
        """

        lOut = []
        for eSurrogate in self.getSurrogates():
            # Inactive surrogates are skipped entirely
            if (eSurrogate.value[1] == True):
                cfTmp = Conny()
                cfTmp.load(eSurrogate.value[2])

                lOut.append(cfTmp.data["parameters"])

        return lOut
Beispiel #12
0
    def __init__(self, _iPopSize: int) -> None:
        """Initialise the Garry population wrapper.

        Args:
            _iPopSize: number of candidates the population will hold.
        """

        # STEP 0: Load this component's configuration file
        self.__enum = en.Garry
        self.__cf = Conny()
        self.__cf.load(self.__enum.value)

        # STEP 1: Private state
        self.__iPopSize = _iPopSize

        # TRO state flags - three booleans, all cleared initially
        self.__bTroState = [False, False, False]

        self.__bAllowTesting = self.__cf.data["parameters"]["allow testing"]

        # STEP 2: Public state - population and best-solution tracking
        self.lPopulation = []

        self.vBestSolution = None
        self.fBestSolution = np.inf
        self.iBestSolution = 0

        # TRO region tracking (populated later)
        self.iTroRegion = None
        self.lTroBest = None

        self.bShowOutput = self.__cf.data["parameters"]["show output"]

        return
class Irene:

    #region Init
    """
    """
    def __init__(self):
        """Load the Irene configuration file."""

        # The enum value points at this component's config file
        self.__enum = en.Irene
        self.__cf = Conny()
        self.__cf.load(self.__enum.value)

        return

    #
    #endregion

    #region Front-End

    def main(self):
        """Top-level menu loop for the optimization front-end.

        Repeats until the user selects the exit entry (option 4).
        """

        # Greet the user once before entering the menu loop
        print(
            "Irene (Main) {" + Helga.time() +
            "} - Welcome. How would you like to start the optimization process?"
        )

        # Menu loop: dispatch on the numeric choice returned by Rae
        while True:
            iChoice = Rae.getUserInput(self.__enum, menu="main")
            os.system("cls")

            if iChoice == 0:
                self.__newGeometry()
            elif iChoice == 1:
                self.__importGeometry()
            elif iChoice == 2:
                # Config editor is not wired up yet
                print("Irene (C-Editor) {" + Helga.time() +
                      "} - This functionality is not implemented yet")  #TODO
            elif iChoice == 3:
                self.__helpMain()
            elif iChoice == 4:
                print("Irene (Exit) {" + Helga.time() + "} - Bye.")
                break

        return

    def configEditor(self):
        """Placeholder for the configuration editor (not implemented).

        NOTE(review): intentionally a no-op for now.
        """

        return

    #
    #endregion

    #region Back-End

    #   region Back-End: Gets

    def __getFreq(self) -> list:
        """Prompt the user for every frequency-menu entry.

        Each entry of the "frequency" menu in this component's config
        file ("items" = entry count, entries keyed "0".."n-1") is read
        from the console as a float; invalid input is re-prompted.

        Returns:
            list: the floats entered, in menu order.
        """

        #   STEP 0: Local variables
        cfTmp = Conny()

        lMenu = []
        lOut = []

        #   STEP 1: Setup - Local variables
        cfTmp.load(self.__enum.value)

        lMenu = cfTmp.data["menus"]["frequency"]

        #   STEP 2: Get frequency menu
        for i in range(0, lMenu["items"]):
            #   STEP 3: User output
            print("Irene (NG) {" + Helga.time() + "} - Please define (" +
                  lMenu[str(i)] + ")")

            #   STEP 4: user input - loop until the value parses
            while (True):
                #   STEP 5: Wait for input
                ui = input("\t>")
                os.system("cls")

                #   STEP 6: Verify input
                try:
                    #   STEP 7: Cast to float
                    ui = float(ui)

                    lOut.append(ui)
                    break

                except:
                    print("Irene (NG) {" + Helga.time() + "} - Invalid Input")

        #   STEP 8: Return
        return lOut

    def __getParams(self) -> list:
        """Prompt the user for each antenna parameter and collect them.

        Iterates over the "params" menu in this component's config file
        ("items" holds the entry count; entries are keyed "0".."n-1").
        Menu indices 2 and 3 are special-cased as the substrate
        permitivitty and substrate height; every other entry is read
        from the console as a float (in cm), re-prompting on bad input.

        Returns:
            list: one value per menu entry, in menu order.
        """

        #   STEP 0: Local variables
        cfTmp = Conny()

        lMenu = []
        lOut = []

        #   STEP 1: Setup - Local variables
        cfTmp.load(self.__enum.value)

        lMenu = cfTmp.data["menus"]["params"]

        #   STEP 2: Iterate through params
        for i in range(0, lMenu["items"]):
            #   STEP 3: Check if substrate
            #   NOTE(review): menu indices 2 and 3 are assumed to be the
            #   substrate entries - confirm against the config file.
            if (i == 2):
                lOut.append(self.__getSubstrate())

            elif (i == 3):
                lOut.append(self.__getSubstrateHeight())

            else:
                print("Irene (NG) {" + Helga.time() + "} - Please define (" +
                      lMenu[str(i)] + ") {cm}")

                #   STEP 4: User input - loop until the value parses
                while (True):
                    #   STEP 5: Wait for input
                    ui = input("\t>")
                    os.system("cls")

                    #   STEP 6: Verify input
                    try:
                        #   STEP 7: Cast to float
                        ui = float(ui)

                        lOut.append(ui)
                        break

                    except:
                        print("Irene (NG) {" + Helga.time() +
                              "} - Invalid Input")

        return lOut

    def __getSubstrate(self) -> float:
        """Interactively select a substrate and return its permitivitty.

        The user first chooses whether to specify the substrate by name
        (0) or by permitivitty value (1); either way the matching menu
        from the config file is printed repeatedly until a valid index
        is entered.

        Returns:
            float: the permitivitty of the chosen substrate.
        """

        #   STEP 0: Local variables
        uiChoice = None

        #   STEP 1: Setup - Local variables
        #   STEP 2: User Output
        print(
            "Irene (NG) {" + Helga.time() +
            "} - Would you like to specify the substrate by name per permitivitty?"
        )

        #   STEP 3: User Input - record 0 (by name) or 1 (by value)
        while (True):
            print("\t0: By Name")
            print("\t1: By permitivitty")
            print("")

            #   STEP 4: Wait for user input
            ui = input("\t>")
            os.system("cls")

            #   STEP 5: Validate input
            try:
                #   STEP 6: Cast to int
                ui = int(ui)

                #   STEP 7: Verify range
                if ((ui == 0) or (ui == 1)):
                    uiChoice = ui
                    break

                else:
                    print("Irene (NG) {" + Helga.time() + "} - Invalid Input")

            except:
                print("Irene (NG) {" + Helga.time() + "} - Invalid Input")

        #   STEP 8: If by value
        if (uiChoice == 1):
            #   STEP 9: User output
            print(
                "Irene (NG) {" + Helga.time() +
                "} - Please choose one of the following default peremitvitties"
            )

            #   STEP 10: User input
            while (True):
                #   STEP 11: Output menu - "items" holds the entry count
                lTmpMenu = self.__cf.data["menus"]["substrate values"]
                for i in range(0, lTmpMenu["items"]):
                    print("\t" + str(i) + ": " + str(lTmpMenu[str(i)]))

                print("")

                #   STEP 12: Wait for user input
                ui = input("\t>")
                os.system("cls")

                #   STEP 13: Verify input
                try:
                    #   STEP 14: Cast to int
                    ui = int(ui)

                    #   STEP 15: Verify range
                    if ((ui >= 0) and (ui < lTmpMenu["items"])):
                        return lTmpMenu[str(ui)]

                    else:
                        print("Irene (NG) {" + Helga.time() +
                              "} - Invalid Input")

                except:
                    print("Irene (NG) {" + Helga.time() + "} - Invalid Input")

        #   STEP 16: By name
        #   BUGFIX: test the recorded choice instead of the re-used raw
        #   input variable `ui`, which was only coincidentally equal here.
        elif (uiChoice == 0):
            #   STEP 16: User Output
            print("Irene (NG) {" + Helga.time() +
                  "} - Please choose on of the following default substrates")

            #   STEP 17: User input
            while (True):
                #   STEP 18: Output menu of substrate names
                lTmpMenu = self.__cf.data["menus"]["substrate names"]
                for i in range(0, lTmpMenu["items"]):
                    print("\t" + str(i) + ": " + lTmpMenu[str(i)])

                print("")

                #   STEP 19: Wait for user input
                ui = input("\t>")
                os.system("cls")

                #   STEP 20: Verify input
                try:
                    #   STEP 21: Cast to int
                    ui = int(ui)

                    #   STEP 22: Verify range - the chosen name index maps
                    #   onto the parallel "substrate values" menu
                    if ((ui >= 0) and (ui < lTmpMenu["items"])):
                        return self.__cf.data["menus"]["substrate values"][str(
                            ui)]

                    else:
                        #   BUGFIX: log tag was the typo "Irene0"
                        print("Irene (NG) {" + Helga.time() +
                              "} - Invalid Input")

                except:
                    #   BUGFIX: log tag was the typo "Irene1"
                    print("Irene (NG) {" + Helga.time() + "} - Invalid Input")

        #   Defensive fallback - unreachable while uiChoice is 0 or 1
        return 0.0

    def __getSubstrateHeight(self) -> float:
        """Interactively choose a default substrate height.

        Prints the "substrate heights" menu from the config file
        ("items" = entry count, entries keyed "0".."n-1") and loops
        until the user enters a valid index.

        Returns:
            float: the selected height (menu labels it in mm).
        """

        #   STEP 0: Local variables
        lTmpMenu = self.__cf.data["menus"]["substrate heights"]

        #   STEP 1: Setup - Local variables
        #   STEP 2: User Output
        print(
            "Irene (NG) {" + Helga.time() +
            "} - Please choose one of the following default substrate heights")

        #   STEP 3: User Input - loop until a valid menu index is given
        while (True):
            #   STEP 4: Output menu
            for i in range(0, lTmpMenu["items"]):
                print("\t" + str(i) + ": " + str(lTmpMenu[str(i)]) + " {mm}")

            print("")

            #   STEP 5: Wait for input
            ui = input("\t>")
            os.system("cls")

            #   STEP 6: Verify input
            try:
                #   STEP 7: Cast to int
                ui = int(ui)

                #   STEP 8: Verify range
                if ((ui >= 0) and (ui < lTmpMenu["items"])):
                    return lTmpMenu[str(ui)]

                else:
                    print("Irene (NG) {" + Helga.time() + "} - Invalid Input")

            except:
                print("Irene (NG) {" + Helga.time() + "} - Invalid Input")

        # BUGFIX: removed the unreachable trailing `return 0.0` - the
        # while-True loop above can only exit via its return statement.

    #
    #   endregion

    #   region Back-End: Antenna-Setup

    def __newGeometry(self) -> None:
        """Gather inputs for a brand-new antenna and start optimization.

        Asks whether the user wants to give a frequency range or raw
        geometry parameters, collects the corresponding values, then
        hands them to self.__optimize with the matching `parent` tag.
        Option 3 aborts back to the main menu without optimizing.
        """

        #   STEP 0: Local variables
        lParams = None
        fParent = None

        #   STEP 1: Setup - Local variables
        #   STEP 2: User Output
        print(
            "Irene (NG) {" + Helga.time() +
            "} - Would you like to specify the frequency range or the parameters of the new antenna?"
        )

        #   STEP 3: User Input
        while (True):
            print("\t0: Frequency Range")
            print("\t1: Parameters")
            print("\t2: ~ Help")
            print("\t3: ~ Exit")
            print("")

            #   STEP 4: Wait for user input
            ui = input("\t>")
            os.system("cls")

            #   STEP 5: Verify input
            try:
                #   STEP 6: Cast to int
                ui = int(ui)

                #   STEP 7: Verify range
                if (ui == 0):
                    #   STEP 8: Get frequency range
                    lParams = self.__getFreq()
                    fParent = "frequency"

                    break

                elif (ui == 1):
                    #   STEP 9: Get geometry parameters
                    lParams = self.__getParams()
                    fParent = "params"

                    break

                elif (ui == 2):
                    self.__helpNG()

                elif (ui == 3):
                    #   Abort: back to the main menu without optimizing
                    print("Irene (Main) {" + Helga.time() +
                          "} - How would you like to continue?")
                    return

                else:
                    print("Irene (NG) {" + Helga.time() + "} - Invalid Input")

            except Exception as ex:
                #   Non-integer input lands here (int() raises ValueError)
                print(ex)
                print("Irene (NG) {" + Helga.time() + "} - Invalid Input")

        #   STEP 10: Energize
        self.__optimize(lParams, parent=fParent)

        #   STEP 10: User Output
        print("Irene (Main) {" + Helga.time() +
              "} - How would you like to continue?")

        #   STEP 11: Return
        return

    def __importGeometry(self) -> None:
        """Import an antenna geometry from a .json file and optimize it.

        The user types a file name or path ("cancel" aborts back to the
        main menu, "help" shows the import help). Bare names are looked
        up under Data\\ConfigFiles and Data\\Exports\\Antennas relative
        to the working directory. The loaded frequency and parameter
        values are forwarded to self.__optimize with parent="import".

        NOTE(review): path handling uses Windows-style backslashes and
        `os.system("cls")`, so this is Windows-only as written.
        """

        #   STEP 0: Local variables
        cfTmp = Conny()
        sPath = None

        lOut = []

        #   STEP 1: Setup - Local variables
        #   STEP 2: User Output
        print("Irene (IG) {" + Helga.time() +
              "} - Please specify the antenna geometry .json file to use.")

        #   STEP 3: User Input
        while (True):
            #   STEP 4: Wait for input
            ui = input("\t>")
            os.system("cls")

            #   STEP ..: Check for import cancel
            if (ui == "cancel"):
                print("Irene (Main) {" + Helga.time() +
                      "} - How would you like to continue?")
                return

            #   STEP ..: Check for help requres
            if (ui == "help"):
                self.__helpIG()

            else:

                #   STEP 5: Check if input contains \\ or /
                if (("\\" in ui) or ("/" in ui)):
                    #   STEP 6: Assume full path - check existence
                    if (os.path.exists(ui)):
                        sPath = ui
                        break

                    else:
                        print("Irene (NG) {" + Helga.time() +
                              "} - Invalid Input")

                else:
                    #   STEP 7: Check in ConfigFiles
                    sTmpPath = os.path.abspath(
                        ".") + "\\Data\\ConfigFiles\\" + ui

                    if (os.path.exists(sTmpPath)):
                        sPath = sTmpPath
                        break

                    #   STEP 8: Check Exports
                    sTmpPath = os.path.abspath(
                        ".") + "\\Data\\Exports\\Antennas\\" + ui

                    if (os.path.exists(sTmpPath)):
                        sPath = sTmpPath
                        break

                    #   STEP 9: User Output
                    print("Irene (NG) {" + Helga.time() + "} - Invalid Input")

        #   STEP 10: Load the json file
        cfTmp.load(sPath)

        #   STEP 11: Append the data (fixed order consumed by __optimize)
        lOut.append(cfTmp.data["frequency"]["lower"])
        lOut.append(cfTmp.data["frequency"]["upper"])
        lOut.append(cfTmp.data["params"]["length"])
        lOut.append(cfTmp.data["params"]["width"])
        lOut.append(cfTmp.data["params"]["substrate height"])
        lOut.append(cfTmp.data["params"]["permitivitty"])

        #   STEP 12: Optimize using parameters
        self.__optimize(lOut, parent="import")

        #   STEP 13: User Output
        print("Irene (Main) {" + Helga.time() +
              "} - How would you like to continue?")

        #   STEP 14: Return
        return

    #
    #   endregion

    #   region Back-End: Antenna-Optimization

    def __optimize(self, _lParams: list, **kwargs) -> None:
        """Configure and run Natalie on the gathered antenna parameters.

        Args:
            _lParams: parameter list produced by __getFreq, __getParams
                or __importGeometry (order depends on the producer).
            **kwargs: must contain "parent" ("frequency", "params" or
                "import") naming where _lParams came from; a missing key
                raises KeyError.

        NOTE(review): fOffset stays None unless parent == "params", so
        Natalie receives offset=None in the other cases - confirm that
        is intended.
        """

        #   STEP 0: Local variables
        fOffset = None
        bDefault = None

        #   STEP 1: Setup - Local variables
        #   STEP 2: Check if parent was params
        if (kwargs["parent"] == "params"):
            #   STEP 3: User Output
            print(
                "Irene (OP) {" + Helga.time() +
                "} - Would you like to specify the offset for the band edges from the center frequency?"
            )

            #   STEP 4: User Input
            while (True):
                #   STEP 5: Output menu options
                print("\t0: Yes")
                print("\t1: No")
                print("")

                #   STEP 6: Wait for user input
                ui = input("\t>")
                os.system("cls")

                #   STEP 7: Verify input
                try:
                    #   STEP 8: Cast to int
                    ui = int(ui)

                    #   STEP 9: Verify range
                    if (ui == 0):
                        #   STEP 10: User Output
                        print("Irene (OP) {" + Helga.time() +
                              "} - Please specify the offset in Hz")

                        #   STEP 11: User Input
                        while (True):
                            #   STEP 11: Wait for user input
                            ui = input("\t>")
                            os.system("cls")

                            #   STEP 12: Verify input
                            try:
                                #   STEP 13: Cast to float
                                ui = float(ui)

                                fOffset = ui
                                break

                            except:
                                print("Irene (OP) {" + Helga.time() +
                                      "} - Invalid Input")

                        break

                    #   NOTE(review): only reached when ui != 0 because
                    #   the ui == 0 branch breaks above; an `elif` would
                    #   state that intent more clearly.
                    if (ui == 1):
                        fOffset = 100.0
                        break

                    else:
                        print("Irene (OP) {" + Helga.time() +
                              "} - Invalid Input")

                except:
                    print("Irene (OP) {" + Helga.time() + "} - Invalid Input")

        #   STEP 2: User Output
        print(
            "Irene (OP) {" + Helga.time() +
            "} - Would you like to use default configurations for this project?"
        )

        #   STEP 3: Get some more user input
        while (True):
            print("\t0: Use Default Configurations")
            print("\t1: Don't Use Default Configurations")
            print("")

            #   STEP 4: Wait for user input
            ui = input("\t>")
            os.system("cls")

            #   STEP 5: Verify input
            try:
                #   STEP 6: Cast to int
                ui = int(ui)

                #   STEP 7: Verify range
                if (ui == 0):
                    bDefault = True
                    break

                elif (ui == 1):
                    bDefault = False
                    break

                else:
                    print("Irene (OP) {" + Helga.time() + "} - Invalid Input")

            except:
                print("Irene (OP) {" + Helga.time() + "} - Invalid Input")

        #   STEP 8: Init Natalie
        nat = Natalie(_lParams,
                      bDefault,
                      parent=kwargs["parent"],
                      offset=fOffset)

        #   STEP 9: User Output
        print("Irene (OP) {" + Helga.time() +
              "} - How would you like to continue?")

        #   STPE 10: User Input
        while (True):
            #   STEP 11: Output menu options
            print("\t0: Start Optimization Process")
            print("\t1: ~ Edit Nat config")
            print("\t2: ~ Help")
            print("\t3: ~ Exit")
            print("")

            #   STEP 12: Wait for user input
            ui = input("\t>")
            os.system("cls")

            #   STEP 13: Verify input
            try:
                #   STEP 14: Cast to int
                ui = int(ui)

                #   STEP 15: Verify range
                if (ui == 0):
                    print(
                        "Irene (OP) {" + Helga.time() +
                        "} - Starting Optimization Process, this could take a while."
                    )
                    break

                elif (ui == 1):
                    nat.configEditor()
                    print("Irene (OP) {" + Helga.time() +
                          "} - How would you like to continue?")

                elif (ui == 2):
                    self.__helpOP()

                elif (ui == 3):
                    #   Exit without running the optimizer
                    return

                else:
                    #   NOTE(review): the "Irene (OP) + {" log tag below
                    #   contains a stray "+" compared to the other tags.
                    print("Irene (OP) + {" + Helga.time() +
                          "} - Invalid Input")

            except:
                print("Irene (OP) + {" + Helga.time() + "} - Invalid Input")

        nat.optimizeAntenna()

        return

    #
    #   endregion

    #   region Back-End: Output

    #       region Back-End-(Output): Help

    def __helpMain(self) -> None:
        """Print the help text for the Main menu, then wait for Enter."""

        # Help entries live under "help"/"help main" in the config file;
        # "items" is the entry count and entries are keyed "0".."n-1"
        dMenu = self.__cf.data["help"]["help main"]

        print("Irene (H-Main) {" + Helga.time() +
              "} - The following options exist in the Main menu.")
        for i in range(0, dMenu["items"]):
            print(dMenu[str(i)])

        print("")

        # Block until the user acknowledges, then clear the console
        input("\t> Continue")
        os.system("cls")

        print("Irene (Main) {" + Helga.time() +
              "} - How would you like to continue?")
        return

    def __helpIG(self) -> None:
        """Show the Antenna Import menu help, then wait for Enter."""

        # Entries live under "help"/"help ig"; keyed "0".."items-1"
        dMenu = self.__cf.data["help"]["help ig"]

        print("Irene (H-IG) {" + Helga.time() +
              "} - The following options exist in the Antenna Import menu.")

        for i in range(0, dMenu["items"]):
            print(dMenu[str(i)])

        print("")

        # Pause until acknowledged, then clear the screen
        input("\t> Continue")
        os.system("cls")

        print("Irene (IG) {" + Helga.time() +
              "} - How would you like to continue?")
        return

    def __helpNG(self) -> None:
        """Show the Antenna Creation menu help, then wait for Enter."""

        # Entries live under "help"/"help ng"; keyed "0".."items-1"
        dMenu = self.__cf.data["help"]["help ng"]

        print("Irene (H-NG) {" + Helga.time() +
              "} - The following options exist in the Antenna Creation menu.")

        for i in range(0, dMenu["items"]):
            print(dMenu[str(i)])

        print("")

        # Pause until acknowledged, then clear the screen
        input("\t> Continue")
        os.system("cls")

        print("Irene (NG) {" + Helga.time() +
              "} - How would you like to continue?")
        return

    def __helpOP(self) -> None:
        """Show the Antenna Optimization menu help, then wait for Enter."""

        # Entries live under "help"/"help op"; keyed "0".."items-1"
        dMenu = self.__cf.data["help"]["help op"]

        print(
            "Irene (H-OP) {" + Helga.time() +
            "} - The following options exist in the Antenna Optimization menu."
        )

        for i in range(0, dMenu["items"]):
            print(dMenu[str(i)])

        print("")

        # Pause until acknowledged, then clear the screen
        input("\t> Continue")
        os.system("cls")

        print("Irene (OP) {" + Helga.time() +
              "} - How would you like to continue?")
        return
    def __importGeometry(self) -> None:
        """Import an antenna geometry from a .json file and optimize it.

        NOTE(review): this is a DUPLICATE definition - an identical
        __importGeometry appears earlier in this class, and this later
        definition is the one that silently takes effect; one of the two
        should be removed.

        The user types a file name or path ("cancel" aborts back to the
        main menu, "help" shows the import help). Bare names are looked
        up under Data\\ConfigFiles and Data\\Exports\\Antennas relative
        to the working directory. The loaded frequency and parameter
        values are forwarded to self.__optimize with parent="import".
        """

        #   STEP 0: Local variables
        cfTmp = Conny()
        sPath = None

        lOut = []

        #   STEP 1: Setup - Local variables
        #   STEP 2: User Output
        print("Irene (IG) {" + Helga.time() +
              "} - Please specify the antenna geometry .json file to use.")

        #   STEP 3: User Input
        while (True):
            #   STEP 4: Wait for input
            ui = input("\t>")
            os.system("cls")

            #   STEP ..: Check for import cancel
            if (ui == "cancel"):
                print("Irene (Main) {" + Helga.time() +
                      "} - How would you like to continue?")
                return

            #   STEP ..: Check for help requres
            if (ui == "help"):
                self.__helpIG()

            else:

                #   STEP 5: Check if input contains \\ or /
                if (("\\" in ui) or ("/" in ui)):
                    #   STEP 6: Assume full path - check existence
                    if (os.path.exists(ui)):
                        sPath = ui
                        break

                    else:
                        print("Irene (NG) {" + Helga.time() +
                              "} - Invalid Input")

                else:
                    #   STEP 7: Check in ConfigFiles
                    sTmpPath = os.path.abspath(
                        ".") + "\\Data\\ConfigFiles\\" + ui

                    if (os.path.exists(sTmpPath)):
                        sPath = sTmpPath
                        break

                    #   STEP 8: Check Exports
                    sTmpPath = os.path.abspath(
                        ".") + "\\Data\\Exports\\Antennas\\" + ui

                    if (os.path.exists(sTmpPath)):
                        sPath = sTmpPath
                        break

                    #   STEP 9: User Output
                    print("Irene (NG) {" + Helga.time() + "} - Invalid Input")

        #   STEP 10: Load the json file
        cfTmp.load(sPath)

        #   STEP 11: Append the data (fixed order consumed by __optimize)
        lOut.append(cfTmp.data["frequency"]["lower"])
        lOut.append(cfTmp.data["frequency"]["upper"])
        lOut.append(cfTmp.data["params"]["length"])
        lOut.append(cfTmp.data["params"]["width"])
        lOut.append(cfTmp.data["params"]["substrate height"])
        lOut.append(cfTmp.data["params"]["permitivitty"])

        #   STEP 12: Optimize using parameters
        self.__optimize(lOut, parent="import")

        #   STEP 13: User Output
        print("Irene (Main) {" + Helga.time() +
              "} - How would you like to continue?")

        #   STEP 14: Return
        return
Beispiel #15
0
class Rae:

    #region Init
    """
        Description:

            Provides terminal menu helpers: displays a numbered list of
            options loaded from a config file and returns the index of the
            user's validated selection.
    """
    def __init__(self):

        #region STEP 0: Local variables

        #   Load this class' configuration file via its enum mapping
        self.__enum = en.Rae
        self.__cf = Conny()
        self.__cf.load(self.__enum.value)

        #endregion

        return

    #
    #endregion

    #region Front-End

    #   region Front-End: Gets

    @classmethod
    def getUserInput(cls, _eClass: Enum, **kwargs) -> int:
        """
            Description:

                Loads the menu associated with the passed enum's config file
                and returns the user's menu selection.

            |\n
            |\n
            |\n
            |\n
            |\n

            Arguments:

                + _eClass   = ( Enum ) The enum whose config file contains the
                    menu to display
                    ~ Required

                + breakOnInvalid    = ( bool ) If True, return -1 on the first
                    invalid input instead of re-prompting

            |\n

            Returns:

                + ( int ) The zero-based index of the chosen menu entry, or
                    -1 when invalid input is allowed to break the loop
        """

        #   STEP 0: Local variables
        cfTmp = Conny()

        #   STEP 1: Check if the enum is acceptable
        #   NOTE: was `== False`; logically identical
        if (not en.isClass(_eClass)):
            raise Exception(
                "An error occured in Rae.getUserInput -> Step 2: Invalid enum passed"
            )

        #   STEP 2: Setup - Local variables
        cfTmp.load(_eClass.value)

        #   STEP 3: Get the menu options
        lMenu = cfTmp.getMenu(kwargs)

        #   STEP 4: Outsource Return - default breakOnInvalid to False
        return cls.__getUI(lMenu, kwargs.get("breakOnInvalid", False))

    #
    #   endregion

    #
    #endregion

    #region Back-End

    #   region Back-End: Gets

    @classmethod
    def __getUI(cls, _lMenu: list, _bBreakOnInvalid: bool) -> int:
        """
            Description:

                Prints the menu and loops until the user enters a valid
                zero-based index. Returns -1 when _bBreakOnInvalid is True and
                the input is invalid.
        """

        #   STEP 0: User input loop
        while (True):
            #   STEP 1: Output menu
            for i, sEntry in enumerate(_lMenu):
                print("\t" + str(i) + ": " + sEntry)

            print("")

            #   STEP 2: Wait for user input
            ui = input("\t>")
            os.system("cls")

            #   STEP 3: Verify input to be type integer
            try:
                #   STEP 4: Try to cast to int
                ui = int(ui)

                #   STEP 5: Check that the int is in range
                if ((ui >= 0) and (ui < len(_lMenu))):
                    #   STEP 6: Return
                    return ui

                elif (_bBreakOnInvalid):
                    return -1

                else:
                    #   STEP 7: Not in range
                    print("Rae {" + Helga.time() + "} - Invalid Input")

            #   BUGFIX: was a bare `except:` which also swallowed unrelated
            #   errors; only the failed int() cast should land here
            except ValueError:
                if (_bBreakOnInvalid):
                    break

                print("Rae (Menu-UI) {" + Helga.time() + "} - Invalid Input")

        return -1

    #
    #   endregion

    #
    #endregion


#
#endregion
Beispiel #16
0
class Antonio:

    #region Init
    """
        Description:

            Contains various activation functions along with their
            derivatives. Functions are selected by index:

                0 = linear, 1 = logistic, 2 = tanh, 3 = relu,
                4 = leaky relu, 5 = elu, 6 = srelu
    """
    def __init__(self):

        #region STEP 0: Local variables

        #   Load this class' configuration file via its enum mapping
        self.__enum = en.Antonio
        self.__cf = Conny()
        self.__cf.load(self.__enum.value)

        #endregion

        #region STEP 1: Private variables

        #   Shortcut into the parameter section of the config data
        dPar = self.__cf.data["parameters"]

        #   region Linear

        self.__fC_linear = dPar["linear"]["c"]["default"]

        #   endregion

        #   region Logistic

        #   NOTE: renamed from the misspelled __fC_logisitic
        self.__fC_logistic = dPar["logistic"]["c"]["default"]

        #   endregion

        #   region TanH

        self.__fC_tanh = dPar["tanh"]["c"]["default"]
        self.__fM_tanh = dPar["tanh"]["magnitude"]["default"]

        #   endregion

        #   region Relu

        self.__fC_relu = dPar["relu"]["c"]["default"]

        #   endregion

        #   region Leaky-Relu

        self.__fC_lRelu_Pos = dPar["leaky relu"]["c"]["positive"]["default"]
        self.__fC_lRelu_Neg = dPar["leaky relu"]["c"]["negative"]["default"]

        #   endregion

        #   region Elu

        self.__fC_elu_lin = dPar["elu"]["c"]["linear"]["default"]
        self.__fC_elu_exp = dPar["elu"]["c"]["exponential"]["default"]

        #   endregion

        #   region Srelu

        self.__fC_srelu_lower = dPar["srelu"]["c"]["lower"]["default"]
        self.__fC_srelu_center = dPar["srelu"]["c"]["center"]["default"]
        self.__fC_srelu_upper = dPar["srelu"]["c"]["upper"]["default"]

        self.__fBoundary_srelu_lower = dPar["srelu"]["boundary"]["lower"]["default"]
        self.__fBoundary_srelu_upper = dPar["srelu"]["boundary"]["upper"]["default"]

        #   endregion

        #   region Gaussian

        #   NOTE(review): loaded from config but no gaussian()/gaussianD()
        #   methods exist, and getActivation() exposes no index for it -
        #   confirm whether the gaussian function was ever implemented
        self.__fC_gaussian = dPar["gaussian"]["c"]["default"]

        #   endregion

        #endregion

        #   STEP 2: Return
        return

    #
    #endregion

    #region Front-End

    #   region Front-End: Gets

    def getActivation(self, _iIn: int, _fIn: float) -> float:
        """
            Description:

                Evaluates the activation function selected by index.

            Arguments:

                + _iIn  = ( int ) The activation function index (0 -> 6)

                + _fIn  = ( float ) The value to activate

            Returns:

                + ( float ) The activation output

            Raises:

                + Exception if _iIn is not a known function index
        """

        #   STEP 0: Dispatch on the function index
        if (_iIn == 0):
            return self.linear(_fIn)
        elif (_iIn == 1):
            return self.logistic(_fIn)
        elif (_iIn == 2):
            return self.tanH(_fIn)
        elif (_iIn == 3):
            return self.relu(_fIn)
        elif (_iIn == 4):
            return self.leakyRelu(_fIn)
        elif (_iIn == 5):
            return self.elu(_fIn)
        elif (_iIn == 6):
            return self.srelu(_fIn)
        else:
            #   STEP 1: Error handling
            raise Exception(
                "An error occured in Antonio.getActivation() - > Step 2: Invalid activation function passed"
            )

    def getActivationD(self, _iIn: int, _fIn: float) -> float:
        """
            Description:

                Evaluates the derivative of the activation function selected
                by index.

            Arguments:

                + _iIn  = ( int ) The activation function index (0 -> 6)

                + _fIn  = ( float ) The point at which to evaluate the
                    derivative (ignored for linear, whose derivative is
                    constant)

            Returns:

                + ( float ) The derivative value

            Raises:

                + Exception if _iIn is not a known function index
        """

        #   STEP 0: Dispatch on the function index
        if (_iIn == 0):
            #   linearD takes no argument - the derivative is constant
            return self.linearD()
        elif (_iIn == 1):
            return self.logisticD(_fIn)
        elif (_iIn == 2):
            return self.tanHD(_fIn)
        elif (_iIn == 3):
            return self.reluD(_fIn)
        elif (_iIn == 4):
            return self.leakyReluD(_fIn)
        elif (_iIn == 5):
            return self.eluD(_fIn)
        elif (_iIn == 6):
            return self.sreluD(_fIn)
        else:
            #   STEP 1: Error handling
            raise Exception(
                "An error occured in Antonio.getActivationD() -> Step 2: Invalid activation function passed"
            )

    #
    #   endregion

    #   region Front-End: Sets

    def setFunction(self, **kwargs) -> None:
        """
            Description:

                Updates the coefficients of the specified activation function.

            Arguments:

                + function  = ( int ) The index of the function to configure
                    ~ Required
                    ~ 0 = linear, 1 = logistic, 2 = tanh, 6 = srelu

                + The remaining kwargs are forwarded to the matching private
                    setter (see __setLinear, __setLogistic, __setTanh,
                    __setSrelu__)
        """

        #   STEP 0: Be safe
        try:
            #   STEP 1: Dispatch to the matching private setter
            if (kwargs["function"] == 0):
                self.__setLinear(kwargs)

            elif (kwargs["function"] == 1):
                self.__setLogistic(kwargs)

            elif (kwargs["function"] == 2):
                self.__setTanh(kwargs)

            elif (kwargs["function"] == 6):
                self.__setSrelu__(kwargs)

            else:
                #   STEP 2: Function not implemented
                raise Exception(
                    "An error occured in Antonio.setFunction() -> Step 9: That activation function isn't fully implemented yet"
                )

        except Exception as ex:
            #   STEP 3: Error handling - chain the original cause
            print("Initial Error: ", ex)
            raise Exception("An error occured in Antonio.setFunction()") from ex

        #   STEP 4: Return
        return

    #
    #   endregion

    #   region Front-End: Linear

    def linear(self, _fIn: float) -> float:
        """
            Returns c * x.
        """

        return _fIn * self.__fC_linear

    def linearD(self) -> float:
        """
            Returns the (constant) derivative of the linear function: c.
        """

        return self.__fC_linear

    #
    #   endregion

    #   region Front-End: Logistic

    def logistic(self, _fIn: float) -> float:
        """
            Returns the logistic sigmoid: 1 / (1 + e^(-c * x)).
        """

        return 1.0 / (1.0 + mt.exp(-1.0 * self.__fC_logistic * _fIn))

    def logisticD(self, _fIn: float) -> float:
        """
            Returns the derivative of the logistic sigmoid:
            d/dx sigmoid(c * x) = c * sigmoid * (1 - sigmoid).
        """

        #   STEP 0: Evaluate the sigmoid once and reuse it
        fSig = self.logistic(_fIn)

        #   STEP 1: Return
        return self.__fC_logistic * fSig * (1.0 - fSig)

    #
    #   endregion

    #   region Front-End: TanH

    def tanH(self, _fIn: float) -> float:
        """
            Returns the scaled hyperbolic tangent: m * tanh(c * x).
        """

        return (self.__fM_tanh * np.tanh(self.__fC_tanh * _fIn))

    def tanHD(self, _fIn: float) -> float:
        """
            Returns the derivative of the scaled tanh:
            d/dx [ m * tanh(c * x) ] = m * c * (1 - tanh(c * x)^2).
        """

        #   STEP 0: Evaluate tanh(c * x) WITHOUT the magnitude scaling
        fTanh = np.tanh(self.__fC_tanh * _fIn)

        #   STEP 1: Return - BUGFIX: the previous version squared the
        #   magnitude-scaled output, yielding m*c*(1 - m^2*tanh^2), which is
        #   wrong whenever the magnitude m != 1
        return self.__fM_tanh * self.__fC_tanh * (1.0 - fTanh * fTanh)

    #
    #   endregion

    #   region Front-End: Relu

    def relu(self, _fIn: float) -> float:
        """
            Returns c * x for x > 0, otherwise 0.
        """

        #   STEP 0: Check relu condition
        if (_fIn > 0):
            return (self.__fC_relu * _fIn)

        return 0.0

    def reluD(self, _fIn: float) -> float:
        """
            Returns the derivative of relu: c for x > 0, otherwise 0.
        """

        #   STEP 0: Check relu condition
        if (_fIn > 0):
            return self.__fC_relu

        return 0.0

    #
    #   endregion

    #   region Front-End: Leaky-Relu

    def leakyRelu(self, _fIn: float) -> float:
        """
            Returns c_pos * x for x > 0, otherwise c_neg * x.
        """

        #   STEP 0: Check Leaky Relu condition
        if (_fIn > 0):
            return self.__fC_lRelu_Pos * _fIn

        return self.__fC_lRelu_Neg * _fIn

    def leakyReluD(self, _fIn: float) -> float:
        """
            Returns the derivative of leaky relu: c_pos for x > 0, otherwise
            c_neg.
        """

        #   STEP 0: Check Leaky Relu condition
        if (_fIn > 0):
            return self.__fC_lRelu_Pos

        return self.__fC_lRelu_Neg

    #
    #   endregion

    #   region Front-End: Elu

    def elu(self, _fIn: float) -> float:
        """
            Returns c_lin * x for x > 0, otherwise c_exp * (e^x - 1).
        """

        #   STEP 0: Check elu condition
        if (_fIn > 0):
            return self.__fC_elu_lin * _fIn

        return (self.__fC_elu_exp * (mt.exp(_fIn) - 1))

    def eluD(self, _fIn: float) -> float:
        """
            Returns the derivative of elu: c_lin for x > 0, otherwise
            c_exp * e^x, conveniently expressed as elu(x) + c_exp.
        """

        #   STEP 0: Check elu condition
        if (_fIn > 0):
            return self.__fC_elu_lin

        #   STEP 1: Return - BUGFIX: d/dx [ c*(e^x - 1) ] = c*e^x, which
        #   equals elu(x) + c; the previous version SUBTRACTED the
        #   coefficient, producing a negative (wrong) derivative
        return (self.elu(_fIn) + self.__fC_elu_exp)

    #
    #   endregion

    #   region Front-End: Srelu

    def srelu(self, _fIn: float) -> float:
        """
            Returns the S-shaped relu: three linear pieces joined at the
            lower and upper boundaries, with slopes c_lower / c_center /
            c_upper respectively.
        """

        #   STEP 0: Local variables
        fOut = 0.0

        #   STEP 1: Check srelu condition
        if (_fIn <= self.__fBoundary_srelu_lower):
            #   Below the lower knee - slope c_lower anchored at the knee
            fOut = self.__fBoundary_srelu_lower
            fOut = fOut + self.__fC_srelu_lower * (_fIn - fOut)

        elif (_fIn >= self.__fBoundary_srelu_upper):
            #   Above the upper knee - slope c_upper anchored at the knee
            fOut = self.__fBoundary_srelu_upper
            fOut = fOut + self.__fC_srelu_upper * (_fIn - fOut)

        else:
            #   Center segment - slope c_center through the origin
            fOut = self.__fC_srelu_center * _fIn

        #   STEP 2: Return
        return fOut

    def sreluD(self, _fIn: float) -> float:
        """
            Returns the derivative of srelu: the slope of whichever linear
            segment _fIn falls in.
        """

        #   STEP 0: Check srelu condition
        if (_fIn <= self.__fBoundary_srelu_lower):
            return self.__fC_srelu_lower

        elif (_fIn >= self.__fBoundary_srelu_upper):
            return self.__fC_srelu_upper

        else:
            return self.__fC_srelu_center

    #
    #   endregion

    #
    #endregion

    #region Back-End

    def __setLinear(self, _dData: dict) -> None:
        """
            Sets the linear coefficient c, if present in _dData.
        """

        #   STEP 0: Check if c is specified
        if ("c" in _dData):
            self.__fC_linear = _dData["c"]

        #   STEP 1: Return
        return

    def __setLogistic(self, _dData: dict) -> None:
        """
            Sets the logistic coefficient c, if present in _dData.
        """

        #   STEP 0: Check if c is specified
        if ("c" in _dData):
            self.__fC_logistic = _dData["c"]

        #   STEP 1: Return
        return

    def __setTanh(self, _dData: dict) -> None:
        """
            Sets the tanh coefficient c and magnitude m, if present in
            _dData.
        """

        #   STEP 0: Check if c is specified
        if ("c" in _dData):
            self.__fC_tanh = _dData["c"]

        #   STEP 1: Check if m is specified
        if ("m" in _dData):
            self.__fM_tanh = _dData["m"]

        #   STEP 2: Return
        return

    def __setSrelu__(self, kwargs: dict) -> None:
        """
            Description:

                Sets the variables for the srelu activation function.

            |\n
            |\n
            |\n
            |\n
            |\n

            Arguments:

                + c = ( dict ) A dictionary containing the gradients for the
                    lower, center, and upper regions of the srelu activation
                    function
                    ~ Required

                    ~ "lower":  ( float )
                    ~ "center": ( float )
                    ~ "upper":  ( float )

                + boundary  = ( dict ) A dictionary containing the boundaries
                    for the srelu activation function
                    ~ Required

                    ~ "lower":  ( float )
                    ~ "upper":  ( float )
        """

        #   region STEP 0->1: Error checking

        #   STEP 0: Check if c arg passed
        if ("c" not in kwargs):
            raise Exception(
                "An error occured in Antonio.__setSrelu__() -> Step 2: No c arg passed"
            )

        #   STEP 1: Check if boundary arg passed
        if ("boundary" not in kwargs):
            raise Exception(
                "An error occured in Antonio.__setSrelu__() -> Step 4: No boundary arg passed"
            )

        #
        #   endregion

        #   STEP 2: Update - Class variables
        self.__fC_srelu_lower = kwargs["c"]["lower"]
        self.__fC_srelu_center = kwargs["c"]["center"]
        self.__fC_srelu_upper = kwargs["c"]["upper"]

        self.__fBoundary_srelu_lower = kwargs["boundary"]["lower"]
        self.__fBoundary_srelu_upper = kwargs["boundary"]["upper"]

        #   STEP 3: Return
        return
Beispiel #17
0
class Golem:

    #region Init

    """
        Description:
        
            This class creates and trains multiple surrogate models using the
            provided dataset. It then uses multi-variate optimization
            algorithms to map the surrogate models in order to find the best
            solution for the provided dataset.

        |\n
        |\n
        |\n
        |\n
        |\n

        Arguments:

            + numSurrogates = ( int ) The number of surrogates that the class
                should use during the training and surface mapping process
                ~ Required
    """

    def __init__(self, **kwargs) -> None:

        #   region STEP 0: Local Variables

        #   Load this class' configuration file via its enum mapping
        self.__enum = en.Golem
        self.__cf = Conny()
        self.__cf.load(self.__enum.value)

        #
        #   endregion

        #   region STEP 1: Private Variables

        #   Surrogate containers - populated once training starts
        self.__iSurrogates = None

        self.__lSRG = []
        self.__lSRG_FItness = []
        self.__lSRG_Accuracy = []

        #   Surrogate generation limits (loaded from the config file)
        self.__iSRG_Min = self.__cf.data["parameters"]["surrogates"]["min"]
        self.__iSRG_Max = self.__cf.data["parameters"]["surrogates"]["max"]

        self.__fSRG_AccRequirement = self.__cf.data["parameters"]["surrogates"]["accuracy requirement"]

        #   Misc flags
        self.__bAllowTesting = self.__cf.data["parameters"]["allow testing"]

        #
        #   endregion

        #   region STEP 2: Public Variables

        #   Mapping results - filled in by trainAndMap()
        self.vBest = None
        self.vFitness = None

        self.lMap_Fitness = None
        self.lMap_Results = None

        #   Output / randomization flags (loaded from the config file)
        self.bShowOutput = self.__cf.data["parameters"]["show output"]

        self.bSRG_Random = self.__cf.data["parameters"]["bools"]["rand surrogate"]
        self.bSRG_RandomParameters = self.__cf.data["parameters"]["bools"]["rand surrogate params"]

        #
        #   endregion

        #   region STEP 3->4: Error checking

        #   Verify that the required numSurrogates arg was provided
        if ("numSurrogates" not in kwargs):
            raise Exception("An error occured in Golem.__init__() -> Step 3: No numSurrogates arg passed")

        #
        #   endregion

        #   STEP 5: Update - Private variables
        self.__iSurrogates = kwargs["numSurrogates"]

        #   STEP 6: Return
        return
    
    #
    #endregion

    #region Front-End

    #   region Front-End: Import/Export

    def importGolem(self, **kwargs) -> None:
        """
            ToDo:

                + Implement function
        """

        #   STPE 0: Local variables
        #   STEP 1: Setup - Local variables
        #   STEP 2: ??
        #   STEP ??: Return
        return

    def exportGolem(self, **kwargs) -> None:
        """
            ToDo:

                + Implement function
        """

        #   STEP 0: Local variables
        #   STEP 1: Setup - Local variables
        #   STEP 2: ??
        #   STEP ??: Return
        return

    #
    #   endregion

    #   region Front-End: Train and Map (Teenage Mutant Ninja Turtles)

    def trainAndMap(self, **kwargs) -> dict:
        """
            Description:

                Uses multi-threading to train and map multiple surrogate models
                simultaneously.

            |\n
            |\n
            |\n
            |\n
            |\n

            Arguments:

                + data  = ( vars ) Data container instance that contains the
                    dataset required for training and mapping
                    ~ Required

                + region    = ( float ) The region within which random 
                    generation is allowed
                    ~ Required

                + rand  = ( bool ) A flag that specifies whether or not random
                    parameters are allowed for the training and mapping process

                + remap = ( bool ) A flag to indicate if the results of the
                    mapping should be un-normalized via the dataset

            |\n

            Returns:

                + dOut = ( dict ) A dictionary containing the best surrogate
                    result and the fitness of the results
                    ~ "result"  = ( dict )
                    ~ "fitness" = ( dict )

            |\n

            ToDo:

                + Add start arg
        """

        #   STEP 0: Local variables
        #   STEP 1: Setup - Local variables

        #   region STEP 2->7: Error checking

        #   STEP 2: Check if data arg passed
        if ("data" not in kwargs):
            #   STEP 3: Error handling
            raise Exception("An error occured in Golem.trainAndMap() -> Step 2: No data arg passed")

        #   STEP 4: Check if region arg passed
        if ("region" not in kwargs):
            #   STEP 5: Error handling
            raise Exception("An error occured in Golem.trainAndMap() -> Step 4: No region arg passed")

        #   STEP 6: Check if rand arg passed
        if ("rand" in kwargs):
            #   STEP 7: Update - Class var
            self.bSRG_Random = kwargs["rand"]

        #
        #   endregion

        #   STEP 8: Train surrogates
        self.__train_srgOverseer__(data=kwargs["data"], region=kwargs["region"])

        #   STEP 9: User output - BUGFIX: this debug print was unconditional
        #   ( `if (True):` ); it now honours the bShowOutput flag like the
        #   output block at the end of this method
        if (self.bShowOutput):
            print("\t\t- Fitness: " + str(self.__lSRG_FItness[0]), "Accuracy: " + str(self.__lSRG_Accuracy[0]), sep="\t", end="\n\n")

        #   STEP 10: Map surrogates
        self.__map_srgOverseer__(data=kwargs["data"])

        #   STEP 11: Check for remapping
        if ("remap" in kwargs):
            #   STEP 12: Check remapping state
            if (kwargs["remap"]):
                #   STEP 13: Remap the overall best candidate
                self.vBest = kwargs["data"].remap( candidate=cp.deepcopy( self.vBest ) )

                #   STEP 14: Loop through best candidates
                for i in range(0, len( self.lMap_Results )):
                    #   STEP 15: Outsource - Remapping
                    self.lMap_Results[i] = kwargs["data"].remap( candidate=cp.deepcopy( self.lMap_Results[i] ))

        #   STEP 16: User output
        if ( self.bShowOutput ):
            print("\n\t~ Train and Map results:")
            print("\t\t- Candidate: ", Helga.round(self.lMap_Results[0], 2))
            print("\t\t- Fitness: ", round(self.lMap_Fitness[0], 2), end="\n\n")

        #   STEP 17: Populate output dict
        dOut = {
            "result":   self.vBest,
            "fitness":  self.vFitness
        }

        #   STEP 18: Return
        return dOut

    #
    #   endregion

    #
    #endregion

    #region Back-End

    #   region Back-End: Inits
    
    def __initSRGParams__(self, **kwargs) -> dict:
        """
            Description:

                Recursively iterates through the antenna surrogate parameters
                and randomizes them

            |\n
            |\n
            |\n
            |\n
            |\n

            Arguments:

                + params    = ( dict ) A dictionary containing the parameters
                    for a surrogate model
                    ~ Requried

                + scalars   = ( dict ) A dictionary containing the scalar
                    values for a surrogate model
                    ~ Required

                + region    = ( int ) The region in which random generation is
                    allowed
                    ~ Required
        """

        #   STEP 0: Local variables
        dOut                    = None

        #   STEP 1: Setup - Local variables

        #   region STEP 2->7: Error checking

        #   STEP 2: Check if params arg passed
        if ("params" not in kwargs):
            #   STEP 3: Error handling
            raise Exception("An error occured in Golem.__initSRGParms__() -> Step 2: No params arg passed")

        #   STEP 4: Check if scalars arg passed
        if ("scalars" not in kwargs):
            #   STEP 5: Error handling
            raise Exception("An error occured in Golem.__initSRGParams__() -> Step 4: No scalars arg passed")

        #   STEP 6: Check if region arg passed
        if ("region" not in kwargs):
            #   STEP 7: Error handling
            raise Exception("An error occured in Golem.__initSRGParams__() -> Step 6: No region arg passed")

        #
        #   endregion

        #   STEP 8: Update - Local variables
        dOut    = kwargs["params"]

        #   STEP 9: Check if random parameters allowed
        if (self.bSRG_RandomParameters == False):
            #   STEP 10: Return
            return dOut

        #   STEP 11: Loop through parameters in current dictionary
        for i in range(0, dOut["items"]):
            #   STEP 12: Get child entry name
            sTmp_Child  = dOut[str(i)]

            #   STEP 13: Check if in scalars
            if (sTmp_Child in kwargs["scalars"]):
                #   STEP 14: Be safe
                try:
                    #   STEP 15: Check if field contains child dictionary
                    if ("items" in dOut[sTmp_Child]):
                        #   STEP 16: Outsource
                        self.__initSRGParams__(params=dOut[sTmp_Child], scalars=kwargs["scalars"][sTmp_Child], region=kwargs["region"])

                #   STEP 17: Shortcut
                except:
                    #   STEP 18: Outsoruce
                    dOut[sTmp_Child] = self.__randVal__(param=dOut[sTmp_Child], scalar=kwargs["scalars"][sTmp_Child], region=kwargs["region"])


        #   STEP 20: Return
        return dOut

    def __initGlobals__(self) -> None:
        """
            Description:

                Initializes all the required global variables for this class.
        """

        #   STEP 0: Declare the globals this method (re)initializes
        global  thread_sUI
        global  thread_eUI
        global  thread_lUI

        global  thread_lTR_Results
        global  thread_lTR_Lock
        global  thread_eTR_Exit

        global  thread_eEX

        global  thread_eGlobal

        #   STEP 1: Setup - Global UI variables
        thread_sUI = ""
        thread_eUI = tr.Event()
        thread_lUI = tr.Lock()

        #   STEP 2: Setup - Training thread result holders; one empty slot
        #   per possible surrogate
        thread_lTR_Results = [None] * self.__iSRG_Max
        thread_lTR_Lock = tr.Lock()

        #   STEP 3: Setup - Training exit event
        thread_eTR_Exit = tr.Event()
        thread_eTR_Exit.clear()

        #   STEP 4: Setup - Global exit event
        thread_eEX = tr.Event()
        thread_eEX.clear()

        #   STEP 5: Setup - Global event signal
        thread_eGlobal = tr.Event()
        thread_eGlobal.clear()

        #   STEP 6: Return
        return

    #
    #   endregion

    #   region Back-End: Gets

    def __randVal__(self, **kwargs) -> float:
        """
            Description:

                Returns the provided parameter value with a random offset
                applied, scaled by the allowed region.

            |\n
            |\n
            |\n
            |\n
            |\n

            Arguments:

                + param = ( float ) The parameters to be randomized
                    ~ Required

                + scalar    = ( dict ) The scalar values to use during the
                    randomization process
                    ~ Required

                + region    = ( float ) The region within which random
                    generation is allowed
                    ~ Required
        """

        #   STEP 0: Local variables - draw the direction sample up front
        fRand = rn.random()

        #   region STEP 1->3: Error checking

        #   STEP 1: Check if param arg passed
        if ("param" not in kwargs):
            raise Exception("An error occured in Golem.__randVal__() -> Step 2: No param arg passed")

        #   STEP 2: Check if scalar arg passed
        if ("scalar" not in kwargs):
            raise Exception("An error occured in Golem.__randVal__() -> Step 4: No scalar arg passed")

        #   STEP 3: Check region arg passed
        if ("region" not in kwargs):
            raise Exception("An error occured in Golem.__randVal__() -> Step 6: No region arg passed")

        #
        #   endregion

        #   STEP 4: Start from the unmodified parameter value
        fOut = kwargs["param"]

        #   STEP 5: Choose positive or negative offset, weighted by the
        #   scalar's "region" probability
        if (fRand <= kwargs["scalar"]["region"]):
            #   STEP 6: Apply a positive offset
            fOut += rn.random() * kwargs["region"] * kwargs["scalar"]["range"]["positive"]

        else:
            #   STEP 7: Apply a negative offset
            fOut -= rn.random() * kwargs["region"] * kwargs["scalar"]["range"]["negative"]

        #   STEP 8: Cast to int if the scalar requests it
        if (kwargs["scalar"].get("type") == "int"):
            fOut = int(fOut)

        #   STEP 9: Return
        return fOut

    def __getSurrogate__(self, **kwargs) -> dict:
        """
            Description:

                Builds a new surrogate model from the active surrogate list,
                optionally picking the type at random, and randomizes its
                parameters before construction.

            |\n
            |\n
            |\n
            |\n
            |\n

            Arguments:

                + region    = ( float ) The region within which random
                    generation is allowed
                    ~ Required
        """

        #   STEP 0: Local variables - active surrogate templates
        lActive = Surrogates.getActiveSurrogates()
        lParams = Surrogates.getActiveParameters()
        lScalars = Surrogates.getActiveScalars()

        #   STEP 1: Check if region arg passed
        if ("region" not in kwargs):
            #   STEP 2: Error handling
            raise Exception("An error occured in Golem.__getSurrogate__() -> Step 2: No region arg passed")

        #   STEP 3: Pick the surrogate type
        if (self.bSRG_Random):
            #   Random selection from the active list
            iIndex = rn.randint(0, len(lActive) - 1)

        else:
            #   Default surrogate (i.e: Annie)
            iIndex = 0

        eChosen = lActive[iIndex]

        #   STEP 4: Deep-copy the parameter and scalar templates so the
        #   originals remain untouched
        dParams = cp.deepcopy(lParams[iIndex])
        dScalars = cp.deepcopy(lScalars[iIndex])

        #   STEP 5: Randomize the surrogate parameters
        dParams = self.__initSRGParams__(params=dParams, scalars=dScalars, region=kwargs["region"])

        #   STEP 6: Construct the new surrogate
        vNew = Surrogates.getNewSurrogate(surrogate=eChosen, params=dParams)

        #   STEP 7: Return
        return {
            "surrogate":    vNew,
            "params":       dParams,
            "enum":         eChosen
        }

    #
    #   endregion

    #   region Back-End: Training

    def __train_srgOverseer__(self, **kwargs) -> None:
        """
            Description:

                Trains the initialized surrogates using the provided dataset.
                Spawns the surrogate training handler on a worker thread and
                waits for either its completion event or a user "exit"
                request, then stores the training results in this instance's
                member variables.

            |\n
            |\n
            |\n
            |\n
            |\n

            Arguments:

                + data  = ( vars ) A data container containing the data set for
                    the training process
                    ~ Required

                + region    = ( float ) The region within which random 
                    number generation can occur
                    ~ Required
        """

        #   STEP 0: Local variables
        eGlobal                 = None
        eGlobal_Exit            = None

        eUI_Event               = None
        qUI_Queue               = None
        tUI_Thread              = None

        eTR_Event               = None
        qTR_Queue               = None
        tTR_Thread              = None

        #   STEP 1: Setup - Local variables

        #   region STEP 2->5: Error checking

        #   STEP 2: Check if data arg passed
        if ("data" not in kwargs):
            #   STEP 3: Error handling
            raise Exception("An error occured in Golem.__train_srgOverseer__() -> Step 2: No data arg passed")
        
        #   STEP 4: Check if region arg passed
        if ("region" not in kwargs):
            #   STEP 5: Error handling
            raise Exception("An error occured in Golem.__train_srgOverseer__() -> Step 4: No region arg passed") 
        
        #
        #   endregion

        #   STEP 6: Setup - Global events
        eGlobal         = mp.Event()
        eGlobal.clear()

        eGlobal_Exit    = mp.Event()
        eGlobal_Exit.clear()

        #   STEP 7: Setup - UI variables
        eUI_Event       = mp.Event()
        eUI_Event.clear()

        qUI_Queue       = mp.Queue()

        #   NOTE(review): the UI thread is created but never started
        #   (start() is commented out), so eUI_Event can never fire and the
        #   "exit" branch below is unreachable - confirm this is intended
        tUI_Thread          = tr.Thread(target=self.__threadUI__, args=(eGlobal_Exit, eGlobal, eUI_Event, qUI_Queue, ))
        tUI_Thread.daemon   = True
        #tUI_Thread.start()
        
        #   STEP 8: Setup - Training thread
        eTR_Event           = mp.Event()
        eTR_Event.clear()

        #   Argument dictionary handed to the training handler via its queue
        dTmp_Train = {
            "min":      self.__iSRG_Min,
            "max":      self.__iSRG_Max,
            "acc":      self.__fSRG_AccRequirement,

            "region":   kwargs["region"],

            "data":     kwargs["data"]
        }

        qTR_Queue           = mp.Queue()
        qTR_Queue.put([dTmp_Train])

        tTR_Thread          = tr.Thread(target=self.__train_srgHandler__, args=(eGlobal_Exit, eGlobal, eTR_Event, qTR_Queue, ))
        tTR_Thread.daemon   = True
        tTR_Thread.start()

        #   STEP 9: Loop until exit
        while (True):
            #   STEP 10: Wait for any worker to signal the global event
            eGlobal.wait()

            #   STEP 11: Clear event so it can be signalled again
            eGlobal.clear()

            #   STEP 12: Check if the UI thread produced input
            if (eUI_Event.is_set()):
                #   STEP 13: Check if input is "exit"
                if (qUI_Queue.get()[0] == "exit"):
                    #   STEP 14: Set global exit event
                    eGlobal_Exit.set()

                    #   STEP 15: Wait for the training thread to finish
                    #tUI_Thread.join()
                    tTR_Thread.join()

                    #   STEP 16: Exit loop
                    break
                
            #   STEP 17: Check if training completed
            if (eTR_Event.is_set()):
                #   STEP 18: Set global exit event
                eGlobal_Exit.set()
                #tUI_Thread.join()

                #   STEP 19: Exit loop
                break

        #   STEP 20: Get results from training (handler puts them back on
        #   the same queue it received its arguments from)
        dTmp_SrgResults = qTR_Queue.get()[0]

        #   STEP 21: Update - Class variables
        self.__lSRG_Accuracy    = dTmp_SrgResults["accuracy"]
        self.__lSRG_FItness     = dTmp_SrgResults["fitness"]
        self.__lSRG             = dTmp_SrgResults["results"]

        #   STEP 22: Return
        return

    def __train_srgHandler__(self, _eExit, _eGlobal, _eTR, _qTR) -> None:
        """
            Description:

                Iteratively trains surrogates until it either finds a surrogate
                that meets the minimum accuracy requirements or until it has
                trained the maximum amount of surrogates. The best surrogate
                (by accuracy-adjusted fitness) is swapped to index 0 of all
                result lists before they are returned via the queue.

            |\n
            |\n
            |\n
            |\n
            |\n

            Arguments:

                + _eExit    = ( mp.Event() ) Global exit event

                + _eGlobal  = ( mp.Event() ) Global thread result event

                + _eTR      = ( mp.Event() ) Surrogate training result event

                + _qTR      = ( mp.Queue() ) Surrogate training result queue;
                    supplies the argument dictionary (min, max, acc, region,
                    data) and receives the result dictionary (accuracy,
                    fitness, results)
        """

        #   STEP 0: Local variables
        dArgs                   = _qTR.get()[0]

        vData                   = dArgs["data"]

        lAccuracy               = []
        lFitness                = []
        lResults                = []

        fBest_Fitness           = np.inf
        iBest_Index             = 0

        print("\t{" + Helga.time() + "} - Starting surrogate training\t\t-> ", end="")

        #   STEP 1: Iterate through max surrogates
        for i in range(dArgs["max"]):
            #   STEP 2: Get new surrogate
            dSRG    = self.__getSurrogate__(region=dArgs["region"])

            vSRG    = dSRG["surrogate"]

            #   STEP 3: Do necessary pre-training configuration
            vSRG.bShowOutput            = False
            vSRG.bUse_NoiseInjection    = True
            vSRG.bUse_L1                = True

            #   STEP 4: Train surrogate (deep-copy the data so training
            #   cannot mutate the shared dataset)
            fTmp_Fitness    = vSRG.trainSet(cp.deepcopy(vData), advanced_training=False, compare=False)
            fTmp_Fitness    = fTmp_Fitness["fitness"]

            #   STEP 5: Get accuracy; scale fitness so higher accuracy
            #   lowers (improves) the adjusted fitness score
            fTmp_Accuracy   = vSRG.getAccuracy(data=vData, size=vData.getLen(), full_set=True)

            fTmp_Fitness    = fTmp_Fitness * ( 1.1 - fTmp_Accuracy["percent accuracy"] )

            #   STEP 6: Append to output lists
            lAccuracy.append( fTmp_Accuracy["percent accuracy"] )
            lFitness.append( fTmp_Fitness )
            lResults.append( vSRG )

            #   STEP 7: Check if fittest surrogate so far with full accuracy
            if ((fTmp_Fitness < fBest_Fitness) and (fTmp_Accuracy["percent accuracy"] == 1.0)):
                #   STEP 8: Update - Local variables
                fBest_Fitness   = fTmp_Fitness
                iBest_Index     = i

                #   STEP 9: User output - minimal
                print("!", end="")

            #   STEP 10: Check if 100p accuracy
            elif (fTmp_Accuracy["percent accuracy"] == 1.0):
                #   STEP 11: Minimal output
                print(":", end="")

            #   STEP 12: Not 100p but best fitness
            elif (fTmp_Fitness < fBest_Fitness):
                #   STEP 13: Update - Local variables
                fBest_Fitness   = fTmp_Fitness
                iBest_Index     = i

                #   STEP 14: User output - minimal
                print("#", end="")

            #   STEP 15: Bad surrogate
            else:
                #   STEP 16: User output - minimal
                print(".", end="")

            #   STEP 17: Early exit once the minimum number of surrogates has
            #   been trained and the best fitness is below the requirement
            #   NOTE(review): dArgs["acc"] is the *accuracy* requirement but
            #   is compared against fitness here - confirm this is intended
            if ((fBest_Fitness < dArgs["acc"]) and (i >= dArgs["min"])):
                #   STEP 18: Exit loop early
                break

            #   STEP 19: Check if exit event set
            if (_eExit.is_set()):
                #   STEP 20: Exit loop early
                break

        print("")
        
        #   STEP 21: Swap best surrogate to index = 0 in all three lists
        lAccuracy[0], lAccuracy[iBest_Index] = lAccuracy[iBest_Index], lAccuracy[0]
        lFitness[0], lFitness[iBest_Index] = lFitness[iBest_Index], lFitness[0]
        lResults[0], lResults[iBest_Index] = lResults[iBest_Index], lResults[0]

        #   STEP 22: Populate output dictionary
        dOut    = {
            "accuracy": lAccuracy,
            "fitness":  lFitness,
            "results":  lResults
        }

        #   STEP 23: Put output in queue
        _qTR.put([dOut])

        #   STEP 24: Set events
        _eTR.set()
        _eGlobal.set()

        #   STEP 25: Return
        return

    #
    #   endregion

    #   region Back-End: Mapping

    def __map_srgOverseer__(self, **kwargs) -> None:
        """
            Description:

                Maps the fittest surrogate as well as all the other surrogates
                whose fitness were within the required range. Results are
                accumulated in self.lMap_Results / self.lMap_Fitness, and the
                best candidate's mapping is kept in self.vBest /
                self.vFitness.
            
            |\n
            |\n
            |\n
            |\n
            |\n

            Arguments:

                + data  = ( vars ) A Data container containing the dataset to
                    be used during the mapping process
                    ~ Required
        """

        #   STEP 0: Local variables
        optimizer               = Hermione()
        optimizer.bShowOutput   = False

        self.vBest              = None
        self.vFitness           = np.inf

        fTmp_Fitness            = np.inf

        #   STEP 1: Setup - Local variables
        self.lMap_Results     = []
        self.lMap_Fitness     = []
        
        #   STEP 2: Check if data arg passed
        if ("data" not in kwargs):
            #   STEP 3: Error handling
            raise Exception("An error occured in Golem.__map_srgOverseer__() -> Step 2: No data arg passed")

        #   STEP 4: User output - header depends on the optimizer's setting
        if (optimizer.bShowOutput):
            print("")

        else:
            print("\t{" + Helga.time() + "} - Starting surrogate mapping\t\t\t-> ", end="")

        #   NOTE(review): the broad handler below prints mapping failures and
        #   continues - confirm this best-effort behavior is intended
        try:

            #   STEP 5: Loop through surrogates
            for i in range(0, len(self.__lSRG)):
                #   STEP 6: Setup - Scope variables
                dTmp_MapResults     = None

                #   STEP 7: The best candidate (index 0) is always mapped
                if (i == 0):
                    #   STEP 8: Outsource threaded mapping
                    dTmp_MapResults = optimizer.mapSurrogate(threading=False, data=kwargs["data"], surrogate=self.__lSRG[i])

                #   STEP 9: Else map when accuracy = 100% or fitness ties the best
                elif ((self.__lSRG_Accuracy[i] == 1.0) or (self.__lSRG_FItness[i] == self.__lSRG_FItness[0])):
                    #   STEP 10: Outsource mapping
                    dTmp_MapResults = optimizer.mapSurrogate(threading=False, data=kwargs["data"], surrogate=self.__lSRG[i])

                #   STEP 11: Check if there are results
                if (dTmp_MapResults is not None):
                    #   STEP 12: Append to results
                    self.lMap_Results.append(dTmp_MapResults["result"])
                    self.lMap_Fitness.append(dTmp_MapResults["fitness"])

                    #   STEP 13: Check - Optimizer output status
                    if (not optimizer.bShowOutput):
                        #   STEP 14: Check if new results are best
                        if (dTmp_MapResults["fitness"] < fTmp_Fitness):
                            #   STEP 15: Update - Local variables
                            fTmp_Fitness    = dTmp_MapResults["fitness"]

                            #   STEP 16: User output
                            print("!", end="")

                        #   STEP 17: Not new best
                        else:
                            #   STEP 18: User output
                            print(".", end="")

            #   STEP 19: Setup - Best results (index 0 holds the best surrogate)
            self.vBest      = self.lMap_Results[0]
            self.vFitness   = self.lMap_Fitness[0]

        except Exception as ex:
            print("Initial error: ", ex)
            print("An error occured in Golem.__map_srgOverseer__()")

        #   STEP 20: Return
        return

    #
    #   endregion

    #   region Back-End: Threading
    
    def __threadUI__(self, _eGlobal_Exit, _eGlobal, _eUI, _qReturn) -> None:
        """
            Description:

                Run as Thread(). Polls a helper thread that performs the
                blocking input() calls and forwards any received input via
                the passed mp.Queue() without blocking this loop.

            |\n
            |\n
            |\n
            |\n
            |\n

            Parameters:

                + _eGlobal_Exit  = ( mp.Event() ) Event signalling global exit
                    for threads and processes

                + _eGlobal  = ( mp.Event() ) Event signalling global action

                + _eUI       = ( mp.Event() ) Event signalling input pushed to
                    the output mp.Queue

                + _qReturn  = ( mp.Queue() ) The queue onto which user input
                    should be returned
        """

        #   Communication channel shared with the blocking input thread
        qInput = mp.Queue()
        eInput = mp.Event()
        eInput.clear()

        #   Spawn the daemon thread that performs the blocking input() calls
        tInput          = tr.Thread(target=self.__threadUI_Blocking__, args=(eInput, qInput, ) )
        tInput.daemon   = True
        tInput.start()

        #   Poll until the global exit event fires
        while (not _eGlobal_Exit.is_set()):
            #   Forward any input received from the blocking thread
            if (eInput.is_set()):
                eInput.clear()

                _qReturn.put( qInput.get() )

                #   Signal both the UI consumer and the global loop
                _eUI.set()
                _eGlobal.set()

            #   Short sleep to avoid busy-waiting
            t.sleep(0.1)

        return

    def __threadUI_Blocking__(self, _eUI, _qReturn) -> None:
        """
            Description:

                Run as Thread(). Gets blocking input and returns via the passed
                mp.Queue()

            |\n
            |\n
            |\n
            |\n
            |\n

            Parameters:

                + _eUI       = ( mp.Event() ) Event signalling input pushed to
                    the output mp.Queue

                + _qReturn  = ( mp.Queue() ) The queue onto which user input
                    should be returned
        """

        #   STEP 0: Local variables

        #   STEP 1: Setup - Local variables

        #   STEP 2: Loop forever; this thread is started as a daemon, so it
        #   dies with the process rather than ever leaving this loop
        while (True):
            #   STEP 3: Check - _eUI status
            #   NOTE(review): Event.wait() returns immediately when the event
            #   is already set, so this guard does not actually pause until
            #   the previous input has been consumed - confirm intent
            if (_eUI.is_set()):
                #   STEP 4: Wait for it to finish
                _eUI.wait()

            #   STEP 5: Get input (blocks until the user presses enter)
            sTmp_Input  = input()

            #   STEP 6: Push input to queue
            _qReturn.put([sTmp_Input])

            #   STEP 7: Signal that new input is available
            _eUI.set()

        #   STEP 8: Return (unreachable - see the loop above)
        return

    def __clearGlobals__(self) -> None:
        """
            Description:

                Clears all the global variables associated with this class by
                resetting every module-level thread handle to None.
        """

        #   STEP 0: Local variables
        global  thread_sUI
        global  thread_eUI
        global  thread_lUI

        global  thread_lTR_Results
        global  thread_lTR_Lock
        global  thread_eTR_Exit

        global  thread_eEX

        global  thread_eGlobal

        #   STEP 1: Setup - Local variables
        #   NOTE(review): acquire/release acts only as a barrier that waits
        #   out a current lock holder; nothing is protected afterwards -
        #   confirm this is the intent
        thread_lTR_Lock.acquire()
        thread_lTR_Lock.release()

        #   STEP 2: Reset every global handle to None
        thread_sUI          = None
        thread_eUI          = None
        #   NOTE(review): release() raises if this lock is not currently
        #   held - confirm the caller guarantees it is held here
        thread_lUI.release()
        thread_lUI          = None

        thread_lTR_Results  = None
        thread_lTR_Lock     = None
        thread_eTR_Exit     = None

        thread_eEX          = None

        thread_eGlobal      = None

        #   STEP 3: Return
        return

    #
    #   endregion

    #
    #endregion

#
#endregion

#region Testing

#
#endregion
Beispiel #18
0
    def __init__(self, **kwargs) -> None:
        """
            Description:

                Initializes the Golem instance by loading its config file and
                setting up the surrogate bookkeeping variables.

            Arguments:

                + numSurrogates = ( int ) The number of surrogates
                    ~ Required
        """

        #   Load this class' configuration file
        self.__enum = en.Golem
        self.__cf = Conny()
        self.__cf.load(self.__enum.value)

        #   Private variables - surrogate bookkeeping
        self.__iSurrogates = None

        self.__lSRG = []
        self.__lSRG_FItness = []
        self.__lSRG_Accuracy = []

        #   Private variables - surrogate generation limits
        self.__iSRG_Min = self.__cf.data["parameters"]["surrogates"]["min"]
        self.__iSRG_Max = self.__cf.data["parameters"]["surrogates"]["max"]

        self.__fSRG_AccRequirement = self.__cf.data["parameters"]["surrogates"]["accuracy requirement"]

        #   Private variables - other
        self.__bAllowTesting = self.__cf.data["parameters"]["allow testing"]

        #   Public variables - results
        self.vBest = None
        self.vFitness = None

        self.lMap_Fitness = None
        self.lMap_Results = None

        #   Public variables - output and randomization flags
        self.bShowOutput = self.__cf.data["parameters"]["show output"]

        self.bSRG_Random = self.__cf.data["parameters"]["bools"]["rand surrogate"]
        self.bSRG_RandomParameters = self.__cf.data["parameters"]["bools"]["rand surrogate params"]

        #   Error checking - numSurrogates is required
        if ("numSurrogates" not in kwargs):
            raise Exception("An error occured in Golem.__init__() -> Step 3: No numSurrogates arg passed")

        #   Update - Private variables
        self.__iSurrogates = kwargs["numSurrogates"]

        return
Beispiel #19
0
class Heimi:
    """
        Description:

            Top-level menu class. Loads its config file on construction and
            routes the user to the available sub-programs from main().
    """

    #region Init

    def __init__(self):
        """
            Initializes the class by loading its associated config file.
        """

        #region STEP 0: Local variables

        self.__enum = en.Heimi
        self.__cf = Conny()
        self.__cf.load(self.__enum.value)

        #endregion

        #region STEP 1: Private variables

        #endregion

        #region STEP 2: Public variables

        #endregion

        #region STEP 3: Setup - Private variables

        #endregion

        #region STEP 4: Setup - Public variables

        #endregion

        return

    #
    #endregion

    #region Front-End

    def main(self):
        """
            Description:

                Main menu loop. Prompts the user for a program to run and
                dispatches accordingly until the user chooses to exit.
        """

        #   STEP 0: Local variables
        #   STEP 1: Setup - Local variables

        #   STEP 2: User Output
        print("Heimi (Main) {" + Helga.time() +
              "} - Hi. What program would you like to run?")

        #   STEP 3: User Input
        while (True):
            ui = Rae.getUserInput(self.__enum, menu="main")
            os.system("cls")

            if (ui == 0):
                #   STEP 4: If OMA then send to Irene
                print("Heimi (Main.OMA) + {" + Helga.time() +
                      "} - Transferring control to Irene. See you soon.")

                irene = Irene()
                irene.main()

                #   Fixed: message previously misspelled the class name as
                #   "Hiemi"
                print("Heimi (Main.OMA) + {" + Helga.time() +
                      "} - Welcome back.")

            elif (ui == 1):
                #   STEP 5: Ha, jokes on you. There is no help for you
                print("Heimi (Main.Help) {" + Helga.time() +
                      "} - Ummmm, really? Lol")

            elif (ui == 2):
                #   STEP 6: Exit program
                print("Heimi (Main.Exit) {" + Helga.time() +
                      "} - Exiting program.")
                break

        #   STEP 7: Return
        return

    def configEditor(self) -> None:
        """
            Description:

                Config editor entry point.

            NOTE(review): currently an unimplemented stub - confirm whether
            an implementation is still planned.
        """

        #   STEP 0: Local variables
        #   STEP 1: Setup - Local variables

        #   STEP 2: ??

        return
Beispiel #20
0
    def __init__(self):
        """
            Initializes the class by loading its config file and caching the
            default activation-function constants it defines.
        """

        #   Load the configuration file for this class
        self.__enum = en.Antonio
        self.__cf = Conny()
        self.__cf.load(self.__enum.value)

        #   Shorthand for the parameters section of the config data
        dParams = self.__cf.data["parameters"]

        #   Linear
        self.__fC_linear = dParams["linear"]["c"]["default"]

        #   Logistic
        self.__fC_logisitic = dParams["logistic"]["c"]["default"]

        #   TanH
        self.__fC_tanh = dParams["tanh"]["c"]["default"]
        self.__fM_tanh = dParams["tanh"]["magnitude"]["default"]

        #   Relu
        self.__fC_relu = dParams["relu"]["c"]["default"]

        #   Leaky-Relu
        self.__fC_lRelu_Pos = dParams["leaky relu"]["c"]["positive"]["default"]
        self.__fC_lRelu_Neg = dParams["leaky relu"]["c"]["negative"]["default"]

        #   Elu
        self.__fC_elu_lin = dParams["elu"]["c"]["linear"]["default"]
        self.__fC_elu_exp = dParams["elu"]["c"]["exponential"]["default"]

        #   Srelu
        self.__fC_srelu_lower = dParams["srelu"]["c"]["lower"]["default"]
        self.__fC_srelu_center = dParams["srelu"]["c"]["center"]["default"]
        self.__fC_srelu_upper = dParams["srelu"]["c"]["upper"]["default"]

        self.__fBoundary_srelu_lower = dParams["srelu"]["boundary"]["lower"]["default"]
        self.__fBoundary_srelu_upper = dParams["srelu"]["boundary"]["upper"]["default"]

        #   Gaussian
        self.__fC_gaussian = dParams["gaussian"]["c"]["default"]

        return
class Sarah:

    #region Init

    """
    """

    def __init__(self):
        """
            Initializes Sarah by loading her config file and caching the
            testing and output flags it defines.
        """

        #   Load the configuration file associated with this class
        self.__enum = en.Sarah
        self.__config = Conny()
        self.__config.load(self.__enum.value)

        #   Private variables
        self.__bAllowTesting = self.__config.data["parameters"]["allow testing"]["default"]

        #   Public variables
        self.bShowOutput = self.__config.data["parameters"]["show output"]["default"]

        return
        
    #
    #endregion

    #region Front-End

    #   region Front-End: Mapping

    def mapSurrogate(self, **kwargs) -> dict:
        """
            Description:

                Maps the passed surrogate using the specified optimizer.

            |\n
            |\n
            |\n
            |\n
            |\n

            Arguments:

                + surrogate = ( vars ) The surrogate to map
                    ~ Required

                + data  = ( vars ) A Data container that contains the data
                    for the mapping process
                    ~ Required

                + optimizer = ( enum ) The optimization algorithm to use during
                    the mapping process
                    ~ Required

            |\n

            Returns:

                + dict  = ( dict ) The mapping results of the chosen optimizer

            Raises:

                + Exception when a required argument is missing or the
                    optimizer is not recognized
        """
        
        #   STEP 0: Local variables

        #   STEP 1: Setup - Local variables

        #   region STEP 2->7: Error checking

        #   STEP 2: Check if surrogate arg passed
        if ("surrogate" not in kwargs):
            #   STEP 3: Error handling
            raise Exception("An error occured in Sarah.mapSurrogate() -> Step 2: No surrogate arg passed")

        #   STEP 4: Check if optimizer arg passed
        if ("optimizer" not in kwargs):
            #   STEP 5: Error handling
            #   Fixed: this message previously reported a missing "surrogate"
            #   arg for a missing optimizer arg
            raise Exception("An error occured in Sarah.mapSurrogate() -> Step 4: No optimizer arg passed")
        
        #   STEP 6: Check if data arg passed
        if ("data" not in kwargs):
            #   STEP 7: Error handling
            raise Exception("An error occured in Sarah.mapSurrogate() -> Step 6: No data arg passed")

        #
        #   endregion
        
        #   STEP 8: Check if PSO
        if (kwargs["optimizer"] == sw.PSO):
            #   STEP 9: User output
            if (self.bShowOutput):
                print("Sarah (map-srg) -> (map-srg-PSO) {" + Helga.time() + "}")

            #   STEP 10: Outsource to pso and return
            return self.__psoMapping__(surrogate=kwargs["surrogate"], data=kwargs["data"])

        #   STEP 11: Unrecognized optimizer - Error handling
        raise Exception("An error occured in Sarah.mapSurrogate() -> Step 11: Unrecognized optimizer")
        
    #
    #   endregion

    #   region Front-End: Training

    def trainSurrogate(self, **kwargs) -> dict:
        """
            Description:

                Trains the passed surrogate using the specified optimizer.

            |\n
            |\n
            |\n
            |\n
            |\n

            Args:

                + surrogate   = ( vars ) The surrogate instance to be trained
                    ~ Required
                + data        = ( vars ) Data container
                    ~ Required
                + password    = ( int ) The surrogate password
                    ~ Required
                + optimizer   = ( enum ) The optimizer to use during training
                    ~ Required

            |\n

            Returns:

                + dictionary    = ( dict )
                    ~ iterations    = ( int ) Number of training iterations
                    ~ surrogate     = ( vars ) The trained surrogate
                    ~ scalar        = ( float ) The surrogate scalar

            Raises:

                + Exception when a required argument is missing or the
                    optimizer is not implemented
        """

        #   STEP 0: Local variables
        #   STEP 1: Setup - Local variables

        #   STEP 2: Check if surrogate passed
        if ("surrogate" not in kwargs):
            #   STEP 3: Error handling
            raise Exception("An error occured in Sarah.trainSurrogate() -> Step 2: No surrogate passed")

        #   STEP 4: Check if data container passed
        if ("data" not in kwargs):
            #   STEP 5: Error handling
            raise Exception("An error occured in Sarah.trainSurrogate() -> Step 4: No data container passed")

        #   STEP 6: Check if surrogate password passed
        if ("password" not in kwargs):
            #   STEP 7: Error handling
            raise Exception("An error occured in Sarah.trainSurrogate() -> Step 6: No surrogate password passed")

        #   STEP 8: Check if optimizer passed
        if ("optimizer" not in kwargs):
            #   STEP 9: Error handling
            raise Exception("An error occured in Sarah.trainSurrogate() -> Step 8: No optimizer passed")

        #   STEP 10: Check if pso
        if (kwargs["optimizer"] == sw.PSO):
            #   STEP 11: User Output
            if (self.bShowOutput):
                print("Sarah (train-srg) -> (train-srg-pso) {" + Helga.time() + "}")

            #   STEP 12: Outsource pso optimization and return
            return self.__psoTraining__(surrogate=kwargs["surrogate"], data=kwargs["data"], password=kwargs["password"])

        #   STEP 13: Unimplemented optimizer - Error handling
        #   (the unreachable "return None" that followed this raise in an
        #   else branch was removed)
        raise Exception("An error occured in Sarah.trainSurrogate(): Unimplemented optimizer passed")
        
    #
    #   endregion

    #
    #endregion

    #region Back-End

    #   region Back-End: Gets

    def __getCandidates__(self, **kwargs) -> list:
        """
            Description:

                Returns a list of candidates for the specified algorithm. The
                initial candidate is kept as the first entry; the remaining
                candidates are filled with uniform random values in [-1, 1].

            |\n
            |\n
            |\n
            |\n
            |\n

            Args:

                + optimizer     = ( enum ) The optimizer
                    ~ Required
                + params        = ( dict ) The optimizer's parameters
                    ~ Required
                + initial       = ( list ) The initial candidate
                    ~ Required

            |\n

            Returns:

                + list          = ( list ) A list of new candidates

            Raises:

                + Exception when the optimizer is not implemented
        """

        #   STEP 0: Local variables
        lCandidates             = []
        lShape                  = None

        #   STEP 1: Setup - Local variables
        lShape                  = Helga.getShape(kwargs["initial"])

        #   STEP 2: Check if PSO
        if (kwargs["optimizer"] == sw.PSO):
            #   STEP 3: Append the initial candidate to the candidate list
            lCandidates.append(kwargs["initial"])

            #   STEP 4: Iterate through remaining required candidates
            for _ in range(1, kwargs["params"]["candidates"]):
                #   STEP 5: Get new empty candidate
                lTmp = cp.deepcopy(lShape)

                #   STEP 6: Iterate through candidate list
                for i in range(0, len(lTmp)):
                    #   STEP 7: Check if single data point
                    #   Fixed: isinstance() instead of comparing type() == float
                    if (isinstance(lTmp[i], float)):
                        #   STEP 8: Set value
                        lTmp[i] = rn.uniform(-1.0, 1.0)

                    else:
                        #   STEP 9: Iterate through nested list
                        for j in range(0, len(lTmp[i])):
                            #   STEP 10: Set value
                            lTmp[i][j] = rn.uniform(-1.0, 1.0)

                #   STEP 11: Append the candidate to the output list
                lCandidates.append(lTmp)

            #   STEP 12: Return
            return lCandidates

        #   STEP 13: Error handling - unimplemented optimizer
        raise Exception("An error occured in Sarah.__getCandidates__(): Unimplemented optimizer passed")

    def __getShape__(self, **kwargs) -> list:
        """
            Description:

                Unimplemented placeholder - intended to return the shape of a
                candidate. Raises immediately so that an accidental call does
                not silently return None (the declared return type is list).
        """

        #   STEP 0: Error handling - not implemented
        raise NotImplementedError("An error occured in Sarah.__getShape__(): Not implemented")

    #
    #   endregion

    def __getFitness__(self, **kwargs) -> list:
        """
            Description:

                Returns the fitness of the candidates as a list.

            |\n
            |\n
            |\n
            |\n
            |\n

            Args:

                + type              = ( str ) The calling function
                    ~ Required
                + candidates        = ( list ) List of potential candidates
                    ~ Required

                - Surrogate:
                    + surrogate     = ( vars ) The surrogate instance
                        ~ Required
                    + data          = ( vars ) Data container
                        ~ Required
                    + password      = ( int/float ) Class password
                        ~ Required

            |\n

            Returns:

                + list          = ( list )
                    ~ List of floats containing fitness for each candidate

            Raises:

                + Exception when a required argument is missing or the type
                    is not implemented
        """

        #   STEP 0: Local variables
        lOut                    = []

        #   STEP 1: Setup - Local variables

        #   STEP 2: Check if type passed
        if ("type" not in kwargs):
            #   STEP 3: Error handling (fixed typo: message previously said
            #   "__geFitness__")
            raise Exception("An error occured in Sarah.__getFitness__() -> Step 2: No type passed")

        #   STEP 4: Check if candidates passed
        if ("candidates" not in kwargs):
            #   STEP 5: Error handling
            raise Exception("An error occured in Sarah.__getFitness__() -> Step 4: No candidate list passed")

        #   STEP 6: Check type
        if (kwargs["type"] == "surrogate"):
            #   STEP 7: Check if surrogate passed
            if ("surrogate" not in kwargs):
                #   STEP 8: Error handling (fixed typo: message previously
                #   said "Saragh")
                raise Exception("An error occured in Sarah.__getFitness__() -> Step 7: No surrogate passed")

            #   STEP 9: Check data container passed
            if ("data" not in kwargs):
                #   STEP 10: Error handling
                raise Exception("An error occured in Sarah.__getFitness__() -> Step 9: No data container passed")

            #   STEP 11: Check if class password passed
            if ("password" not in kwargs):
                #   STEP 12: Error handling
                raise Exception("An error occured in Sarah.__getFitness__() -> Step 11: No class password passed")

            #   STEP 13: Get temp variables
            surrogate = kwargs["surrogate"]
            data = kwargs["data"]
            candidates = kwargs["candidates"]
            password = kwargs["password"]

            #   STEP 14: Iterate through candidates
            for i in range(0, len(candidates)):
                #   STEP 15: Set the surrogate weights
                surrogate.setWeights(weights=candidates[i], password=password)

                #   STEP 16: Append fitness to output list
                lOut.append(surrogate.getAFitness(data=data))

            #   STEP 17: Return
            return lOut

        #   STEP 18: Error handling - an unknown type previously fell through
        #   and returned None, violating the declared return type; raise
        #   instead (consistent with SpongeBob.__getFitness__)
        raise Exception("An error occured in Sarah.__getFitness__() -> Step 6: Unimplemented functionality")

    def __getParams__(self, **kwargs) -> dict:
        """
            Description:

                Returns the specified optimization algorithm's required
                parameters, read from this instance's configuration data.

            |\n
            |\n
            |\n
            |\n
            |\n

            Args:

                + optimizer     = ( enum ) The optimization algorithm
                    ~ Required

            |\n

            Returns:

                + dict          = ( dict ) The algorithm's parameters
        """

        #   STEP 0: Local variables
        dTmp                    = self.__config.data["parameters"]["algorithms"]

        #   STEP 1: Check if optimizer passed (guard added for consistency
        #   with the other argument checks in this class)
        if ("optimizer" not in kwargs):
            #   STEP 2: Error handling
            raise Exception("An error occured in Sarah.__getParams__() -> Step 1: No optimizer passed")

        #   STEP 3: Check if PSO
        if (kwargs["optimizer"] == sw.PSO):
            #   STEP 4: Adjust holder dictionary to the PSO subsection
            dTmp = dTmp["pso"]

            #   STEP 5: Populate output dictionary from the config defaults
            dOut = {
                "iterations":       dTmp["training"]["iterations"]["algorithm"]["default"],
                "iterations-def":   dTmp["training"]["iterations"]["back propagation"]["default"],
                "candidates":       dTmp["training"]["candidates"]["default"],
                "scalar":           dTmp["training"]["candidate scalar"]["default"],
                "check point":      dTmp["training"]["acc check point"]["default"],
                "requirement":      dTmp["training"]["acc requirement"]["default"],
                "phi1":             dTmp["training"]["parameters"]["phi 1"]["default"],
                "phi2":             dTmp["training"]["parameters"]["phi 2"]["default"],
                "eta":              dTmp["training"]["parameters"]["eta"]["default"],

                "mapping":          dTmp["mapping"]
            }

            #   STEP 6: Return
            return dOut

        #   STEP 7: Error handling (fixed typo: message previously said
        #   "An erro occured")
        raise Exception("An error occured in Sarah.__getParams__() -> Step 7: Unimplemented algorithm passed")

    #
    #   endregion

    #   region Back-End: Training

    def __psoTraining__(self, **kwargs) -> dict:
        """
            Description:

                Trains the passed surrogate using Particle-Swarm Optimization.
                Each PSO iteration also runs a number of plain
                back-propagation steps on the best particle before feeding the
                refined weights back into the swarm.

            |\n
            |\n
            |\n
            |\n
            |\n

            Args:
            
                + surrogate     = ( vars ) The surrogate instance to be trained
                    ~ Required
                + data          = ( vars ) Data container
                    ~ Required
                + password      = ( int/float ) The surrogate password
                    ~ Required
            
            |\n

            Returns:

                + dictionary        = ( dict ) A dict instance containing
                    ~ accuracy      = ( int ) Number of accurate samples
                    ~ algorithm     = ( str ) Always "pso"
                    ~ iterations    = ( int ) The training iterations, negated
                    ~ scalar        = ( float ) The surrogate instance's scalar
                    ~ surrogate     = ( vars ) The trained surrogate
                    ~ inverse accuracy = dHold["iterations"] / accuracy, or
                        np.inf when no samples were accurate
        """

        #   STEP 0: Local variables

        surrogate               = None
        password                = None

        dPsoParams              = None

        swarm                   = None

        dTrainingData           = None
        dTestingData            = None

        lCandidates             = []
        lFitness                = []

        #   region STEP 1->6: Error checking

        #   STEP 1: Check if surrogate arg passed
        if ("surrogate" not in kwargs):
            #   STEP 2: Error handling
            raise Exception("An error occured in Sarah.__psoTraining__() -> Step 1: No surorgate arg passed")

        #   STEP 3: Check if data arg passed
        if ("data" not in kwargs):
            #   STEP 4: Error handling
            raise Exception("An error occured in Sarah.__psoTraining__() -> Stpe 3: No data arg passed")

        #   STEP 5: Check if password arg passed
        if ("password" not in kwargs):
            #   STEP 6: Error handling
            raise Exception("An error occured in Sarah.__psoTraining__() -> Step 5: No password arg passed")

        #
        #   endregion

        #   region STEP 7->17: Setup - Local variables

        #   STEP 7: Init algorithm parameters
        dPsoParams      = self.__getParams__(optimizer=sw.PSO)

        #   STEP 8: Init datasets - split into a training and a testing set
        dTestingData    = kwargs["data"].splitData()

        dTrainingData   = dTestingData["training"]
        dTestingData    = dTestingData["testing"]

        #   STEP 9: Set surrogate and password variables
        surrogate       = kwargs["surrogate"]
        password        = kwargs["password"]

        #   STEP 10: Init candidate list, seeded with the surrogate's
        #   current weights
        lCandidates     = self.__getCandidates__(optimizer=sw.PSO, params=dPsoParams, initial=surrogate.getWeights(password=password))

        #   STEP 11: Init fitness list (evaluated on the testing set)
        lFitness        = self.__getFitness__(type="surrogate", candidates=lCandidates, surrogate=surrogate, data=dTestingData, password=password) 

        #   STEP 12: Init swarm
        swarm           = SwarmChan(dPsoParams["candidates"])

        swarm.initPsoPositions( lCandidates )
        swarm.initPsoFitness( lFitness )
        swarm.initPsoParams( dPsoParams["phi1"], dPsoParams["phi2"], dPsoParams["eta"] )
        
        #   STEP 13: Get rand - used to pick a regularization mode below
        fTmp            = rn.uniform(0.0, 1.0)

        #   STEP 14: Check if L1 (65% chance)
        if (fTmp < 0.65):
            #   STEP 15: Set - L1
            surrogate.bUse_L1  = True

        #   STEP 16: Check if L2 (20% chance; the remaining 15% uses neither)
        elif  (fTmp < 0.85):
            #   STEP 17: Set - L2
            surrogate.bUse_L2   = True

        #
        #   endregion

        #   STEP 18: User Output
        if (self.bShowOutput):
            print("Sarah (train-srg-pso) {" + Helga.time() + "} - Starting Particle-Swarm Optimization\n")

        #   STEP 19: Perform number of iterations
        for i in range(0, dPsoParams["iterations"] + 1):
            #   STEP 20: Reset variables
            lFitness    = []
            lCandidates = []

            #   region STEP 21->29: Training process

            #   STEP 21: Perform swarming
            swarm.pso()

            #   STEP 22: Clamp every particle to the trust region and collect
            #   the positions as the new candidate list
            for j in range(0, dPsoParams["candidates"]):
                swarm.lParticles[j].lCurrPosition = self.__limit_candidate_to_trust_region__(candidate=swarm.lParticles[j].lCurrPosition)
                lCandidates.append(swarm.lParticles[j].lCurrPosition)

            #   STEP 23: Get updated fitness values
            lFitness = self.__getFitness__(type="surrogate", candidates=lCandidates, surrogate=surrogate, data=dTestingData, password=password)

            #   STEP 24: Set new particle fitness
            swarm.setParticleFitness(lFitness)

            #   STEP 25: Set surrogate weights to best candidate
            surrogate.setWeights(weights=swarm.lBestSolution, password=password)

            #   STEP 26: Perform default (back-propagation) training
            for j in range(0, dPsoParams["iterations-def"]):
                #   STEP 27: Get random data sample
                dDNR = dTrainingData.getRandDNR(noise=True)

                #   STEP 28: Perform propagation
                surrogate.propagateForward(data=dDNR["in"], password=password)
                surrogate.propagateBackward(data=dDNR["out"], password=password)

            #   STEP 29: Write the locally refined weights back as the
            #   swarm's best solution
            swarm.lBestSolution = surrogate.getWeights(password=password)

            #
            #   endregion

        #   STEP 30: Get accuracy as percentage over the full data set
        dHold   = surrogate.getAccuracy(data=kwargs["data"], size=kwargs["data"].getLen(), full_set=True)
        iAcc    = dHold["accurate samples"]
        fAcc    = dHold["percent accuracy"]

        #   STEP 31: User Output
        #   NOTE(review): "Unsuccessful" is printed unconditionally, even
        #   after a normal run - confirm whether that is intended
        if (self.bShowOutput):
            print("\tSarah (train-srg-pso) {" + Helga.time() + "} - Particle-Swarm Optimization Unsuccessful")
            print("\t\tTotal iterations: " + str(i))
            print("\t\tAccurate Samples: " + str(iAcc))
            print("\t\tPercent Accuracy: " + str(round(fAcc * 100.0, 2)) + "%\n")
            
        #   STEP 32: Populate output dictionary
        #   NOTE(review): "iterations" is negated (-i) - presumably a flag
        #   for non-convergence; verify against callers
        dOut = {
            "accuracy":     iAcc,
            "algorithm":    "pso",
            "iterations":   -i,
            "scalar":       dPsoParams["scalar"],
            "surrogate":    surrogate
        }

        #   STEP 33: Check that iAcc > 0 to avoid division by zero
        if (iAcc <= 0):
            dOut["inverse accuracy"] = np.inf

        else:
            dOut["inverse accuracy"] = float(dHold["iterations"] / iAcc)

        #   STEP 34: Return
        return dOut

    def __nmTraining__(self, **kwargs) -> dict:
        """
            Description:

                Placeholder for Nelder-Mead training - currently performs no
                work and returns an empty dictionary.
        """

        #   STEP 0: Return - nothing implemented yet
        return dict()

    #
    #   endregion

    #   region Back-End: Mapping

    def __psoMapping__(self, **kwargs) -> dict:
        """
            Description:

                Maps the passed surrogate using Particle-Swarm Optimization:
                searches the surrogate's input space for the best-scoring
                input point.

            |\n
            |\n
            |\n
            |\n
            |\n
            |\n

            Arguments:

                + surrogate = ( vars) The surrogate instance to be mapped
                    ~ Required

                + data  = ( vars ) A Data container that contains the data
                    for the mapping process
                    ~ Required

            |\n

            Returns:

                + dict          = ( dict ) A dict instance containing
                    ~ result        = ( list ) The swarm's best solution
                    ~ fitness       = ( vars ) The best solution's fitness
                    ~ iterations    = ( int ) The mapping iterations performed
        """

        #   STEP 0: Local variables
        vData                   = None
        vSRG                    = None
        vSwarm                  = None

        dPSO_Params             = None

        lCandidates             = []
        lFitness                = []

        #   STEP 1: Setup - Local variables

        #   region STEP 2->5: Error checking
        
        #   STEP 2: Check if surrogate arg passed
        if ("surrogate" not in kwargs):
            #   STEP 3: Error handling
            raise Exception("An error occured in Sarah.__psoMapping__() -> Step 2: No surrogate arg passed")

        #   STEP 4: Check if data arg passed
        if ("data" not in kwargs):
            #   STEP 5: Error handling
            raise Exception("An error occured in Sarah.__psoMapping__() -> Step 4: No data arg passed")
        
        #
        #   endregion
        
        #   region STEP 6->11: Setup - Local variables

        #   STEP 6: Update - Local variables
        vData   = kwargs["data"]
        vSRG    = kwargs["surrogate"]

        #   STEP 7: Get PSO params (the "mapping" subsection only)
        dPSO_Params     = self.__getParams__(optimizer=sw.PSO)["mapping"]
        
        #   STEP 8: Get initial candidate - a zero vector as wide as the
        #   surrogate's input
        iTmp_Candidate  = vData.getInputWidth()
        lTmp_Candidate  = []

        for _ in range(0, iTmp_Candidate):
            lTmp_Candidate.append(0.0)

        lCandidates     = self.__getCandidates__(optimizer=sw.PSO, params=dPSO_Params, initial=lTmp_Candidate)

        #   Also seed the pool with input samples from the data set.
        #   NOTE(review): getRandDNR() appears to sample randomly, so
        #   duplicates are possible - confirm this is intended
        vData.reset()
        for i in range(0, vData.getLen()):
            lCandidates.append(vData.getRandDNR()["in"])

        dPSO_Params["candidates"] = len(lCandidates)

        #   STEP 9: Loop through candidates
        for i in range(0, len(lCandidates)):
            #   STEP 10: Get candidate fitness from the surrogate
            lFitness.append( vSRG.getPointOutput( lCandidates[i] ) )


        #   STEP 11: Setup - Swarm chan
        vSwarm  = SwarmChan(dPSO_Params["candidates"])

        vSwarm.initPsoPositions(lCandidates)
        vSwarm.initPsoFitness(lFitness)
        vSwarm.initPsoParams(dPSO_Params["phi1"], dPSO_Params["phi2"], dPSO_Params["eta"])

        #
        #   endregion
        
        #   STEP 12: User output
        if (self.bShowOutput):
            print("Sarah (map-srg-pso) {" + Helga.time() + "} - Starting Particle-Swarm Optimization mapping")

        #   STEP 13: Iterate
        for i in range(0, dPSO_Params["iterations"] + 1):
            #   STEP 14: Setup - Local variables
            lCandidates = []
            lFitness    = []

            #   STEP 15: Perform swarming
            vSwarm.pso()

            #   STEP 16: Iterate through candidates
            for j in range(0, dPSO_Params["candidates"]):
                #   STEP 17: Clamp the particle to the trust region, then
                #   evaluate its fitness on the surrogate
                vSwarm.lParticles[j].lCurrPosition = self.__limit_candidate_to_trust_region__(candidate=vSwarm.lParticles[j].lCurrPosition)
                lFitness.append( vSRG.getPointOutput( vSwarm.lParticles[j].lCurrPosition ) )

            #   STEP 18: Update swarm fitness
            vSwarm.setParticleFitness(lFitness)

        #   STEP 19: User output
        if (self.bShowOutput):
            print("Sarah (map-srg-PSO) {" + Helga.time() + "} - Particle-Swarm Optimzation mapping completed")
            print("\tTotal Iterations: " + str(i))

        #   STEP 20: Populate output dictionary
        dOut    = {
            "result":       vSwarm.lBestSolution,
            "fitness":      vSwarm.fBestSolution,
            "iterations":   i
        }

        #   STEP 21: Return
        return dOut
        
    def __nmMapping__(self, **kwargs) -> dict:
        """
            Description:

                Placeholder for Nelder-Mead mapping - currently performs no
                work and returns an empty dictionary.
        """

        #   STEP 0: Return - nothing implemented yet
        return dict()
        
    #
    #   endregion
    
    #   region Back-End: Other

    def __limit_candidate_to_trust_region__(self, **kwargs) -> list:
        """
            Description:

                Limits the provided candidate to the range of -1 and 1.
                The candidate is clamped in place and also returned. Only
                entries whose type is exactly float (or nested lists of
                them) are clamped.

            |\n
            |\n
            |\n
            |\n
            |\n

            Args:
            
                + candidate     = ( list ) The candidate to be adjusted
                    ~ Required
            
            |\n

            Returns:

                + list          = ( list ) The candidate with every value
                    clamped to the closed interval [-1.0, 1.0]
        """

        #   region STEP 0->1: Error handling

        #   STEP 0: Check if candidate arg passed
        if ("candidate" not in kwargs):
            #   STEP 1: Error handling
            raise Exception("An error occured in Sarah.__limit_candidate_to_trust_region__() -> Step 0: No candidate arg passed")

        #
        #   endregion
        
        #   STEP 2: Local variables
        lCandidate                  = kwargs["candidate"]

        #   STEP 3: Loop through candidate
        for i in range(0, len(lCandidate)):
            #   STEP 4: Check if single data point
            if (type(lCandidate[i]) == float):
                #   STEP 5: Check if value over limit
                if (lCandidate[i] > 1.0):
                    #   STEP 6: Limit value
                    lCandidate[i] = 1.0
                
                #   STEP 7: Check if value below lower limit
                elif (lCandidate[i] < -1.0):
                    #   STEP 8: Limit value
                    lCandidate[i] = -1.0

            else:
                #   STEP 9: Loop through data point (assumes a nested list
                #   of floats - TODO confirm no other entry types occur)
                for j in range(0, len(lCandidate[i])):
                    #   STEP 10: Check if value over upper limit
                    if (lCandidate[i][j] > 1.0):
                        #   STEP 11: Limit value
                        lCandidate[i][j] = 1.0
                    
                    #   STEP 12: Check if value below lower limit
                    elif (lCandidate[i][j] < -1.0):
                        #   STEP 13: Limit value
                        lCandidate[i][j] = -1.0

        #   STEP 14: Return
        return lCandidate

    #
    #   endregion

    #
    #endregion

#
#endregion

#region Testing

#
#endregion
class SpongeBob:

    #region Init
    """
    """
    def __init__(self):
        """
            Initializes the SpongeBob instance by loading its configuration
            file and caching the relevant parameter defaults.
        """

        #region STEP 0: Local variables

        self.__enum = en.SpongeBob
        self.__config = Conny()
        self.__config.load(self.__enum.value)

        #endregion

        #region STEP 1: Private variables

        #   Testing flag, from the "allow testing" parameter default
        self.__bAllowTesting = self.__config.data["parameters"]["allow testing"]["default"]

        #endregion

        #region STEP 2: Public variables

        #   Console output flag, from the "show output" parameter default
        self.bShowOutput = self.__config.data["parameters"]["show output"]["default"]

        #endregion

        return

    #
    #endregion

    #region Front-End

    #   region Front-End: Mapping

    def mapSurrogate(self, **kwargs) -> dict:
        """
            Description:

                Maps the passed surrogate using the specified optimizer.

            |\n
            |\n
            |\n
            |\n
            |\n
            
            Arguments:

                + surrogate = ( vars ) The surrogate that requires mapping
                    ~ Required

                + data  = ( vars ) A Data container that contains the data
                    for the mapping process
                    ~ Required

                + optimizer = ( enum ) The optimizer to be used during the
                    mapping process
                    ~ Required
        """

        #   STEP 0: Local variables

        #   STEP 1: Setup - Local variables

        #   region STEP 2->7: Error checking

        #   STEP 2: Check if surrogate arg passed
        if ("surrogate" not in kwargs):
            #   STEP 3: Error handling
            raise Exception(
                "An error occured in SpongeBob.mapSurrogate() -> Step 2: No surrogate arg passed"
            )

        #   STEP 4: Check if data arg passed
        if ("data" not in kwargs):
            #   STEP 5: Error handling
            raise Exception(
                "An error occured in SpongeBob.mapSurrogate() -> Step 4: No data arg passed"
            )

        #   STEP 6: Check if optimizer arg passed
        if ("optimizer" not in kwargs):
            #   STEP 7: Error handling
            raise Exception(
                "An error occured in SpongeBob.mapSurrogate() -> Step 6: No optimizer arg passed"
            )

        #
        #   endregion

        #   STEP 8: Check if TRO
        if (kwargs["optimizer"] == ga.TRO):
            #   STEP 10: User output
            if (self.bShowOutput):
                print("SpongeBob (map-srg) -> (map-srg-TRO) {" + Helga.time() +
                      "}")

            #   STEP 11: Outsource to tro and return
            return self.__troMapping__(surrogate=kwargs["surrogate"],
                                       data=kwargs["data"])

        #   STEP 12: Unrecognized optimizer - Error handling
        raise Exception(
            "An error occured in SpongeBob.mapSurrogate() -> Step 12: Unrecognized optimizer"
        )

    #
    #   endregion

    #   region Front-End: Training

    def trainSurrogate(self, **kwargs) -> dict:
        """
            Description:

                Trains the passed surrogate using the specified optimizer.

            |\n
            |\n
            |\n
            |\n
            |\n

            Args:

                + surrogate   = ( vars ) The surrogate instance to be trained
                    ~ Required

                + data        = ( vars ) Data container
                    ~ Required

                + password    = ( int ) The surrogate password
                    ~ Required

                + optimizer   = ( enum ) The optimizer to user during training
                    ~ Required

            |\n

            Returns:

                + dictionary    = ( dict )
                    ~ iterations    = ( int ) Number of training iterations
                    ~ surrogate     = ( vars ) The trained surrogate
                    ~ scalar        = ( float ) The surrogate scalar
        """

        #   STEP 0: Local variables
        #   STEP 1: Setup - Local variables

        #   STEP 2: Check if surrogate passed
        if ("surrogate" not in kwargs):
            #   STEP 3: Error handling
            raise Exception(
                "An error occured in SpongeBob.trainSurrogate() -> Step 2: No surrogate passed"
            )

        #   STEP 4: Check if data container passed
        if ("data" not in kwargs):
            #   STEP 5: Error handling
            raise Exception(
                "An error occured in SpongeBob.trainSurrogate() -> Step 4: No data container passed"
            )

        #   STEP 6: Check if surrogate password passed
        if ("password" not in kwargs):
            #   STEP 7: Error handling
            raise Exception(
                "An error occured in SpongeBob.trainSurrogate() -> Step 6: No surrogate password passed"
            )

        #   STEP 8: Check if optimizer passed
        if ("optimizer" not in kwargs):
            #   STEP 9: Error handlign
            raise Exception(
                "An error occured in SpongeBob.trainSurrogate() -> Step 8: No optimizer passed"
            )

        #   STEP 10: Check if tro
        if (kwargs["optimizer"] == ga.TRO):
            #   STEP 11: User Output
            if (self.bShowOutput):
                print("SpongeBob (train-srg) -> (train-srg-tro) {" +
                      Helga.time() + "}")

            #   STEP 12: Outsource tro optimization and return
            return self.__troTraining__(surrogate=kwargs["surrogate"],
                                        data=kwargs["data"],
                                        password=kwargs["password"])

        else:
            #   STEP ??: Error handling
            raise Exception(
                "An error occured in SpongeBob.trainSurrogate(): Unimplemented optimizer passed"
            )

        #   STEP ??: Return
        return None

    #
    #   endregion

    #
    #endregion

    #region Back-End

    #   region Back-End: Gets

    def __getCandidates__(self, **kwargs) -> list:
        """
            Description:

                Returns a list of candidates for the specified algorithm.

            |\n
            |\n
            |\n
            |\n
            |\n

            Args:

                + optimizer = ( enum ) The optimzier
                    ~ Required

                + params    = ( dict ) The optimizer's parameters
                    ~ Required

                + initial   = ( list ) The initial candidate

                + region    = ( float ) The algorithm's current region
                    ~ Required if <optimizer="tro">

            |\n

            Returns

                + list      = ( list ) A list of new candidates
        """

        #   STEP 0: Local variables
        lCandidates = []

        #   STEP 1: Setup - Local variables

        #   STEP 2: Check if optimizer passed
        if ("optimizer" not in kwargs):
            #   STEP 3: Error handling
            raise Exception(
                "An error occured in SpongeBob.__getCandidates__() -> Step 2: No optimizer passed"
            )

        #   STEP 4: Check optimizer parameters passed
        if ("params" not in kwargs):
            #   STEP 5: Error handling
            raise Exception(
                "An error occured in SpongeBob.__getCandidates__() -> Step 4: No optimizer parameters passed"
            )

        #   STEP 6: Check if initial candidate passed
        if ("initial" not in kwargs):
            #   STEP 7: Error handling
            raise Exception(
                "An error occured in SpongeBob.__getCandidates__() -> Step 6: No initial candidate passed"
            )

        #   STEP 8: Check if optimizer is tro
        if (kwargs["optimizer"] == ga.TRO):
            #   STEP 9: Check if region passed
            if ("region" not in kwargs):
                #   STEP 10: Error handling
                raise Exception(
                    "An error occured in SpongeBob.__getCandidates__() -> Step 9: No region passed"
                )

            #   STEP 11: Iterate through the required number of candidates
            for _ in range(0, kwargs["params"]["candidates"]):
                #   STEP 12: Get temporary candidate
                lTmp = Helga.getShape(kwargs["initial"])

                #   STEP 13: Iterate through candidate
                for i in range(0, len(lTmp)):
                    #   STEP 14: Check if single point
                    if (type(lTmp[i]) == float):
                        #   STEP 16: Modify value using region and scalar
                        lTmp[i] = rn.gauss(
                            kwargs["initial"][i],
                            kwargs["params"]["scalar"] * kwargs["region"])

                        #   STEP 17: Check if value above upper limit
                        if (lTmp[i] > 1.0):
                            #   STEP 18: Limit value
                            lTmp[i] = 1.0

                        #   STEP 19: Check if value below lower limit
                        if (lTmp[i] < -1.0):
                            #   STEP 20: Limit value
                            lTmp[i] = -1.0

                    else:
                        #   STEP 21: Iterate through list
                        for j in range(0, len(lTmp[i])):
                            #   STEP 22: Modify value using region and sacalar
                            lTmp[i][j] = rn.gauss(
                                kwargs["initial"][i][j],
                                kwargs["params"]["scalar"] * kwargs["region"])

                            #   STEP 23: Check if value above upper limit
                            if (lTmp[i][j] > 1.0):
                                #   STEP 24: Limit value
                                lTmp[i][j] = 1.0

                            #   STEP 25: Check if value below lower limit
                            if (lTmp[i][j] < -1.0):
                                #   STEP 26: Limit value
                                lTmp[i][j] = -1.0

                #   STEP 22: Append new candidate to output list
                lCandidates.append(lTmp)

            #   STEP 23: Return
            return lCandidates

        #   STEP ??: Error handling
        raise Exception(
            "An error occured in SpongeBob.__getCandidates__(): Unimplemented optimizer passed"
        )

    def __getFitness__(self, **kwargs) -> list:
        """
            Description:

                Returns the fitness of the candidates as a list

            |\n
            |\n
            |\n
            |\n
            |\n

            Args:

                + type          = ( str ) The calling function
                    ~ Required
                + candidates    = ( list ) List of potential candidates
                    ~ Required

                - Type = Surrogate:
                    + surrogate = ( vars ) The surrogate instance
                        ~ Required
                    + data      = ( vars ) Data container
                        ~ Required
                    + password  = ( int / float ) Class password
                        ~ Required
        """

        #   STEP 0: Local variables
        lOut = []
        #   STEP 1: Setup - Local variables

        #   STEP 2: Check that candidates were passed
        if ("candidates" not in kwargs):
            #   STEP 3: Error handling
            raise Exception(
                "An error occured in SpongeBob.__getFitness__() -> Step 2: No candidate list passed"
            )

        #   STEP 4: Check if type specified
        if ("type" not in kwargs):
            #   STEP 5: Error handling
            raise Exception(
                "An error occured in SpongeBob.__getFitness__() -> Step 4: No type specified"
            )

        #   STEP 6: If surrogate
        if (kwargs["type"] == "surrogate"):
            #   STEP 7: Check if surrogate passed
            if ("surrogate" not in kwargs):
                #   STEP 8: Error handling
                raise Exception(
                    "An error occured in SpongeBob.__getFitness__() -> Step 7: No surrogate passed"
                )

            #   STEP 9: Check if data container passed
            if ("data" not in kwargs):
                #   STEP 10: Error handling
                raise Exception(
                    "An error occured in SpongeBob.__getFitness__() -> Step 9: No data passed"
                )

            #   STEP 11: Check if class password passed
            if ("password" not in kwargs):
                #   STEP 12: Error handling
                raise Exception(
                    "An error occured in SpongeBob.__getFitness__() -> Step 11: No class password passed"
                )

            #   STEP 13: Get temp variables
            surrogate = kwargs["surrogate"]
            data = kwargs["data"]
            candidates = kwargs["candidates"]
            password = kwargs["password"]

            #   STEP 14: Iterate through candidates
            for i in range(0, len(candidates)):
                #   STEP 15: Set the surrogate weights
                surrogate.setWeights(weights=candidates[i], password=password)

                #   STEP 16: Append fitness to output list
                lOut.append(surrogate.getAFitness(data=data))

            #   STEP 17: Return
            return lOut

        else:
            #   STEP ??: Error handling
            raise Exception(
                "An error occured in SpongeBob.__getFitness__() -> Step 6: Unimplemented functionality"
            )

    def __getParams__(self, **kwargs) -> dict:
        """
            Description:

                Returns the specified optimization algorithm's required
                parameters.

            |\n
            |\n
            |\n
            |\n
            |\n

            Args:

                + optimizer     = ( enum ) The optimization algorithm
                    ~ Required

            |\n

            Returns:

                + dictionary    = ( dict ) Contains the following
                    ~ scalar        = ( float ) Algorithm weight scalar
                    ~ candidates    = ( int ) Number of candidates
                    ~ region        = ( int ) Initial region
                    ~ iterations    = ( int ) Algorithm iterations
                    ~ iterations-def    = ( int ) Algorithm default training
                        iterations
                    ~ check point   = ( int ) Accuracy check point
                    ~ requirement   = ( float ) Accuracy requirement
                    ~ mapping       = ( dict ) Mapping parameters

        """

        #   STEP 0: Local variables
        dAlgorithms = self.__config.data["parameters"]["algorithms"]

        #   STEP 1: Guard - only TRO is currently implemented
        if (kwargs["optimizer"] != ga.TRO):
            #   STEP 2: Error handling
            raise Exception(
                "An error occured in SpongeBob.__getParams__(): Unimplemented optimizer passed"
            )

        #   STEP 3: Narrow down to the TRO config sections
        dTro = dAlgorithms["tro"]
        dTrain = dTro["training"]

        #   STEP 4: Assemble and return the parameter dictionary
        return {
            "iterations": dTrain["iterations"]["algorithm"]["default"],
            "iterations-def": dTrain["iterations"]["back propagation"]["default"],
            "candidates": dTrain["candidates"]["default"],
            "scalar": dTrain["candidate scalar"]["default"],
            "check point": dTrain["acc check point"]["default"],
            "requirement": dTrain["acc requirement"]["default"],
            "region": dTrain["region"]["default"],
            "mapping": dTro["mapping"]
        }

    #
    #   endregion

    #   region Back-End: Training

    def __troTraining__(self, **kwargs) -> dict:
        """
            Description:

                Trains the passed surrogate using Trust-Region Optimization.
                Each iteration generates candidate weight sets around the
                current best solution, scores them on a held-out test split,
                refines the best one with back-propagation, and lets the
                Garry optimizer shrink the trust region until it collapses
                or the iteration budget runs out.

            |\n
            |\n
            |\n
            |\n
            |\n

            Args:
            
                + surrogate = ( vars ) The surrogate instance to be trained
                    ~ Required

                + data      = ( vars ) Data container
                    ~ Required

                + password  = ( int / float ) The surrogate instance's password
                    ~ Required

            |\n

            Returns:

                + dictionary        = ( dict ) A dict instance containing
                    ~ accuracy          = ( int ) Number of accurate samples
                    ~ algorithm         = ( str ) Always "tro"
                    ~ iterations        = ( int ) The training iterations,
                        stored negated (see note at the output dictionary)
                    ~ scalar            = ( float ) The algorithm's candidate
                        scalar
                    ~ surrogate         = ( vars ) The trained surrogate
                    ~ inverse accuracy  = ( float ) dHold["iterations"] divided
                        by the accurate-sample count, or np.inf when no sample
                        was accurate

        """

        #   STEP 0: Local variables
        surrogate = None
        password = None

        dTroParams = None

        garry = None

        dTestingData = None

        lCandidates = []
        lFitness = []

        #   region STEP 1->6: Error checking

        #   STEP 1: Check if surrogate arg passed
        if ("surrogate" not in kwargs):
            #   STEP 2: Error handling
            raise Exception(
                "An error occured in SpongeBob.__troTraining__() -> Step 1: NO surrogate arg passed"
            )

        #   STEP 3: Check if data arg passed
        if ("data" not in kwargs):
            #   STEP 4: Error handling
            raise Exception(
                "An error occured in SpongeBob.__troTraining__() -> Step 3: No data arg passed"
            )

        #   STEP 5: Check if password arg passed
        if ("password" not in kwargs):
            #   STEP 6: Error handling
            raise Exception(
                "An error occured in SpongeBob.__troTraining__() -> Step 5: No password arg passed"
            )

        #
        #   endregion

        #   region STEP 7->13: Setup - Local variables

        #   STEP 7: Init algorithm parameters from the config file
        dTroParams = self.__getParams__(optimizer=ga.TRO)

        #   STEP 8: Init datasets (split the container into train/test)
        dTestingData = kwargs["data"].splitData()

        dData_Train = dTestingData["training"]
        dData_Test = dTestingData["testing"]

        #   STEP 9: Init surrogate and password variables
        surrogate = kwargs["surrogate"]
        password = kwargs["password"]

        #   STEP 10: Init surrogate activation functions
        #   NOTE(review): intentionally empty - nothing is configured here

        #   STEP 11: Seed the candidate list with the surrogate's current
        #   weights
        lCandidates.append(surrogate.getWeights(password=password))

        #   STEP 12: Init fitness list (scored on the test split)
        lFitness = self.__getFitness__(type="surrogate",
                                       candidates=lCandidates,
                                       surrogate=surrogate,
                                       data=dData_Test,
                                       password=password)

        #   STEP 13: Init the optimizer instance
        garry = Garry(dTroParams["candidates"])

        garry.initTroParticles(candidates=lCandidates)
        garry.initTroFitness(fitness=lFitness)
        garry.initTroParams(region=dTroParams["region"])

        #   STEP 14: Draw a random number to pick a regularisation mode
        fTmp = rn.uniform(0.0, 1.0)

        #   STEP 15: Check if L1 (65% chance)
        if (fTmp < 0.65):
            #   STEP 16: Set - L1
            surrogate.bUse_L1 = True

        #   STEP 17: Check if L2 (20% chance; remaining 15% uses neither)
        elif (fTmp < 0.85):
            #   STEP 18: Set - L2
            surrogate.bUse_L2 = True

        #
        #   endregion

        #   STEP 19: User Output
        if (self.bShowOutput):
            print("SpongeBob (train-srg-tro) {" + Helga.time() +
                  "} - Starting Trust-Region Optimization\n")

        #   STEP 20: Perform specified number of iterations
        for i in range(0, dTroParams["iterations"] + 1):
            #   STEP 21: Clear necessary variables
            lCandidates = []
            lFitness = []

            #   STEP 22: Populate candidate list around the current best,
            #   scaled by how much the trust region has shrunk
            lCandidates = self.__getCandidates__(
                optimizer=ga.TRO,
                params=dTroParams,
                initial=garry.lTroBest[0].lCurrPosition,
                region=float(garry.iTroRegion / dTroParams["region"]))

            #   STEP 23: Get candidate list fitness
            for j in range(0, len(lCandidates)):
                #   STEP 24: Set surrogate weights
                surrogate.setWeights(weights=lCandidates[j], password=password)

                #   STEP 25: Append candidate fitness
                lFitness.append(surrogate.getAFitness(data=dData_Test))

            #   STEP 26: Hand population and scores to the optimizer
            garry.setPopulation(candidates=lCandidates)
            garry.setFitness(fitness=lFitness)

            #   STEP 27: Set surrogate weights to best candidate
            surrogate.setWeights(weights=garry.vBestSolution.lCurrPosition,
                                 password=password)

            #   STEP 28: Refine the best candidate with back-propagation
            for j in range(0, dTroParams["iterations-def"]):
                #   STEP 29: Get random data sample (with noise)
                dDNR = dData_Train.getRandDNR(noise=True)

                #   STEP 30: Perform propagation
                surrogate.propagateForward(data=dDNR["in"], password=password)
                surrogate.propagateBackward(data=dDNR["out"],
                                            password=password)

            #   STEP 31: Feed the refined weights/fitness back to the
            #   optimizer so tro() works from the improved solution
            garry.vBestSolution.lCurrPosition = surrogate.getWeights(
                password=password)
            garry.fBestSolution = surrogate.getAFitness(data=dData_Test)

            #   STEP 32: Perform trust-region optimization (may shrink region)
            garry.tro()

            #   STEP 33: Check if the region has collapsed
            if (garry.iTroRegion <= 1):
                #   STEP 34: Exit loop early
                break

        #   STEP 35: Get accuracy over the full (unsplit) data set
        dHold = surrogate.getAccuracy(data=kwargs["data"],
                                      size=kwargs["data"].getLen(),
                                      full_set=True)
        iAcc = dHold["accurate samples"]
        fAcc = dHold["percent accuracy"]

        #   STEP 36: User Output
        if (self.bShowOutput):
            #   STEP 37: Print output
            if (fAcc >= dTroParams["requirement"]):
                print("SpongeBob (train-srg-tro) {" + Helga.time() +
                      "} - Trust-Region Optimization successful")
                print("\tTotal Iterations: " + str(i))
                print("\tAccurate Samples: " + str(iAcc))
                print("\tPercent Accuracy: " + str(round(fAcc * 100.0, 2)) +
                      "%\n")

            else:
                print("\tSpongeBob (train-srg-tro) {" + Helga.time() +
                      "} - Trust-Region Optimization Unsuccessful")
                print("\t\tTotal iterations: " + str(i))
                print("\t\tAccurate Samples: " + str(iAcc))
                print("\t\tPercent Accuracy: " + str(round(fAcc * 100.0, 2)) +
                      "%\n")

        #   STEP 38: Populate output dictionary
        #   NOTE(review): "iterations" is stored negated (-i) - presumably a
        #   caller-side convention; confirm before relying on the sign
        dOut = {
            "accuracy": iAcc,
            "algorithm": "tro",
            "iterations": -i,
            "scalar": dTroParams["scalar"],
            "surrogate": surrogate
        }

        #   STEP 39: Guard the division below against iAcc == 0
        if (iAcc <= 0):
            dOut["inverse accuracy"] = np.inf

        else:
            dOut["inverse accuracy"] = float(dHold["iterations"] / iAcc)

        #   STEP 40: Return
        return dOut

    #
    #   endregion

    #   region Back-End: Mapping

    def __troMapping__(self, **kwargs) -> dict:
        """
            Description:

                Maps the passed surrogate using Trust-Region Optimization:
                starting from the all-zero input point, it searches the
                surrogate's input space for the point with the best output,
                shrinking the trust region each iteration until it collapses
                or the iteration budget is exhausted.

            |\n
            |\n
            |\n
            |\n
            |\n

            Arguments:

                + surrogate = ( vars ) The surrogate instance to be mapped
                    ~ Required

                + data  = ( vars ) A Data container that contains the dataset
                    to be used during the mapping process
                    ~ Required

            |\n

            Returns:

                + dictionary    = ( dict ) A dict instance containing
                    ~ result        = ( list ) The best input point found
                    ~ fitness       = ( vars ) The fitness of that point, as
                        tracked by the optimizer
                    ~ iterations    = ( int ) Iterations actually performed
        """

        #   STEP 0: Local variables
        vData = None
        vGarry = None
        vSRG = None

        dTRO_Params = None

        lCandidates = []
        lFitness = []

        #   STEP 1: Setup - Local variables

        #   region STEP 2->5: Error checking

        #   STEP 2: Check if surrogate arg passed
        if ("surrogate" not in kwargs):
            #   STEP 3: Error handling
            raise Exception(
                "An error occured in SpongeBob.__troMapping__() -> Step 2: No surrogate arg passed"
            )

        #   STEP 4: Check if data arg passed
        if ("data" not in kwargs):
            #   STEP 5: Error handling
            raise Exception(
                "An error occured in SpongeBob.__troMapping__() -> Step 4: No data arg passed"
            )

        #
        #   endregion

        #   region STEP 6->10: Setup - Local variables

        #   STEP 6: Update - Local variables
        vData = kwargs["data"]
        vSRG = kwargs["surrogate"]

        #   STEP 7: Build the initial candidate - a zero vector with one
        #   entry per surrogate input
        iTmp_Candidate = vData.getInputWidth()
        lTmp_Candidate = []

        for _ in range(0, iTmp_Candidate):
            lTmp_Candidate.append(0.0)

        lCandidates.append(lTmp_Candidate)

        #   STEP 8: Get initial fitness (surrogate output at the zero point)
        lFitness.append(vSRG.getPointOutput(lTmp_Candidate))

        #   STEP 9: Get TRO params (the mapping-specific subsection)
        dTRO_Params = self.__getParams__(optimizer=ga.TRO)["mapping"]

        #   STEP 10: Setup - the optimizer instance
        vGarry = Garry(dTRO_Params["candidates"])

        vGarry.initTroParticles(candidates=lCandidates)
        vGarry.initTroFitness(fitness=lFitness)
        vGarry.initTroParams(region=dTRO_Params["region"])

        #
        #   endregion

        #   STEP 11: User output
        if (self.bShowOutput):
            print("SpongeBob (map-srg-TRO) {" + Helga.time() +
                  "} - Starting Trust-Region Optimization mapping")

        #   STEP 12: Loop for max iterations
        for i in range(0, dTRO_Params["iterations"] + 1):
            #   STEP 13: Clear required variables
            lFitness = []

            #   STEP 14: Populate candidate list around the current best,
            #   scaled by how much the trust region has shrunk
            lCandidates = self.__getCandidates__(
                optimizer=ga.TRO,
                params=dTRO_Params,
                initial=vGarry.lTroBest[0].lCurrPosition,
                region=float(vGarry.iTroRegion / dTRO_Params["region"]))

            #   STEP 15: Loop through candidates
            for j in range(0, len(lCandidates)):
                #   STEP 16: Get candidate fitness from the surrogate
                lFitness.append(vSRG.getPointOutput(lCandidates[j]))

            #   STEP 17: Hand population and scores to the optimizer
            vGarry.setPopulation(candidates=lCandidates)
            vGarry.setFitness(fitness=lFitness)

            #   STEP 18: Perform trust-region optimization (may shrink region)
            vGarry.tro()

            #   STEP 19: Check if region too small
            if (vGarry.iTroRegion <= 1):
                #   STEP 20: Exit loop early
                break

        #   STEP 21: User output
        if (self.bShowOutput):
            print("SpongeBob (map-srg-TRO) {" + Helga.time() +
                  "} - Trust-Region Optimizaion mapping completed")
            print("\tTotal Iterations: " + str(i))

        #   STEP 22: Populate output dictionary
        #   NOTE: "i" leaks out of the loop above; it is the last iteration
        #   index reached, whether the loop broke early or ran to completion
        dOut = {
            "result": vGarry.lTroBest[0].lCurrPosition,
            "fitness": vGarry.lTroBest[1],
            "iterations": i
        }

        #   STEP 23: Return
        return dOut

    #
    #   endregion

    #   region Back-end: Other

    def __limit_candidate_to_trust_region__(self, **kwargs) -> list:
        """
            Description:

                Limits the provided candidate to the range of -1 and 1.

            |\n
            |\n
            |\n
            |\n
            |\n

            Args:
            
                + candidate     = ( list ) The candidate to be adjusted
                    ~ Required
            
            |\n

            Returns:

                + dictionary        = ( dict ) A dict instance containing
                    ~ surrogate     = ( vars ) The trained surrogate
                    ~ iterations    = ( int ) The training iterations
                    ~ scalar        = ( float ) The surrogate intstance's scalar
        """

        #   region STEP 0->1: Error handling

        #   STEP 0: Check if candidate arg passed
        if ("candidate" not in kwargs):
            #   STEP 1: Error handling
            raise Exception(
                "An error occured in Sarah.__limit_candidate_to_trust_region__() -> Step 0: No candidate arg passed"
            )

        #
        #   endregion

        #   STEP 2: Local variables
        lCandidate = kwargs["candidate"]

        #   STEP 3: Loop through candidate
        for i in range(0, len(lCandidate)):
            #   STEP 4: Check if single data point
            if (type(lCandidate[i]) == float):
                #   STEP 5: Check if value over limit
                if (lCandidate[i] > 1.0):
                    #   STEP 6: Limit value
                    lCandidate[i] = 1.0

                #   STEP 7: Check if value below lower limit
                elif (lCandidate[i] < -1.0):
                    #   STEP 8: Limit value
                    lCandidate[i] = -1.0

            else:
                #   STEP 9: Loop through data point
                for j in range(0, len(lCandidate[i])):
                    #   STEP 10: Check if value over upper limit
                    if (lCandidate[i][j] > 1.0):
                        #   STEP 11: Limit value
                        lCandidate[i][j] = 1.0

                    #   STEP 12: Check if value below lower limit
                    elif (lCandidate[i][j] < -1.0):
                        #   STEP 13: Limit value
                        lCandidate[i][j] = -1.0

        #   STEP 14: Return
        return lCandidate
class DAVID:

    #   region Init
    """
    ToDo "This bitch empty! YEEEET"
    """
    def __init__(self) -> None:

        #region STEP 0: Local variables

        self.__enum = en.David
        self.__cf = Conny()
        self.__cf.load(self.__enum.value)

        #endregion

        #region STEP 1: Private variables

        self.__bAllowTesting = self.__cf.data["parameters"]["allow testing"]

        #endregion

        #region STEP 2: Public variables

        self.bShowOutput = self.__cf.data["parameters"]["show output"]

        #endregion

        #region STEP 3: Setup - Private variables

        #endregion

        #region STEP 4: Setup - Public variables

        #endregion

        return

    #
    #   endregion

    #   region Front-End

    def main(self, _iIterations: int, _iWaitPeriod):
        """
        """
        #   STEP -1: Global variables
        global teUInputEvent
        global tTest

        #   STEP 0: Local variables
        sFileName = Helga.ticks()
        lData = []
        iCount = 0

        #   STEP 1: Setup - Global variables
        tTest = thread.Thread(target=self.__userInput)
        tTest.daemon = True
        tTest.start()

        #   STEP ..: Setup - local variables

        #   STEP 2: We out here looping
        while (True):
            #   STEP 3: Perform the result acquisition
            print("\tDAVID - Gathering data (" + str(iCount + 1) + " / " +
                  str(_iIterations) + ")")
            lData = self.__theTHING(lData, sFileName)
            iCount = iCount + 1

            #   STEP 4: Check for user input
            if (teUInputEvent.isSet() == True):
                #   STEP 4.1: Get global varialbes
                global sUserInput

                #   STEP 4.2: Check if input was to stop
                if (sUserInput == "stop"):
                    #   STEP 4.2.1: Clear variables and end loop
                    sUserInput = ""
                    teUInputEvent.clear()

                    break

                else:
                    #   STEP 4.2.2: Clear variables and restart thread (no additional commands atm)
                    sUserInput = ""
                    teUInputEvent.clear()

                    tTest.run()

            #   STEP 5: Check if iterations have been reached

            if ((_iIterations > 0) and (iCount >= _iIterations)):
                #   STEP 5.1: iteration condition achieved
                break

            #   STEP 6: Wait the set amount of time
            if ((_iWaitPeriod > 0) and (_iWaitPeriod <= 10)):
                t.sleep(_iWaitPeriod)

        #   STEP 7: Average data
        #lData = self.__averageData(lData, iCount)

        #   STEP 8: Write the data to file and ???
        self.__saveData(lData, sFileName, iCount)

        #   STEP 9: GTFO
        return

    #
    #   endregion

    #   region Back-End

    def __theTHING(self, _lData: list, _sFileName: str) -> list:
        #   values chosen arbitrarily
        #
        #   a = Swarm Size              = [10, 20 {, 5}]
        #   b = Swarm Iterations        = [15, 40 {, 5}]
        #   c = Def Iterations          = [400, 1000 {, 100}]

        #   STEP 0: Local vars
        global teUInputEvent

        fFile = None

        sFileName = ""
        iCount = 0
        bFirst = False

        #   STEP 1: Define if necesarry
        if (len(_lData) <= 0):
            bFirst = True
            sFileName = os.path.abspath(
                ".") + "\\Helpers\\Testing\\" + _sFileName + "_parm.txt"

            fileTmp = open(sFileName, "a")
            fileTmp.close()
            fileTmp = None

            fFile = open(sFileName, "r+")
            fFile.write(
                "Params=[Swarm Size, Swarm Iterations, Def Iterations]\n")

        #   STEP 0: Other variables
        dIris = Data()
        dIris.importData(
            os.path.abspath(".") + "\\Data\\DataSets\\Iris\\iris.data")

        #   STEP 2: Swarm Size
        for a in range(10, 21, 5):
            #   STEP 3: Swarm Iterations
            for b in range(15, 41, 5):
                #   STEP 4: Def Iterations
                for c in range(400, 1001, 100):
                    #   STEP 6: Create annie
                    """
                        fire = Annie(4, 3, 5, 5, -1)
                        fire.bShowOutput    = True
                        
                        #   STEP 6.1 Set activation function variables
                        fire.setTanH(0.2, 1.0)
                        fire.fLearningRate  = fire.fLearningRate * 5.0
                        
                        #   STEP 6.2: Set candidate and algorithm iterations
                        fire.iOptCandidates = a + 1
                        fire.iOptIterations = b + 1
                        fire.iMaxIterations = c + 1

                        fire.iAccCheckPoint = 25

                        #	STEP 7: Local variables
                        bFailed     = 0
                        iTime       = dt.datetime.now()
                        iAccuracy   = 0
                        iIterations = 0

                        try:
                            #   STEP 8: Run
                            iIterations = fire.trainSet(dIris, True, False, True, 3)
                            iAccuracy = fire.getAccuracy(dIris)

                        except Exception as ex:
                            #   STEP 2.7: bombed out
                            print("\tDAVID (The THING) {" + Helga().time() + "} - Bombed out:")
                            print("\t\tSwarm Size: " + str(a))
                            print("\t\tSwarm Iterations: " + str(b))
                            print("\t\tDef Iterations: " + str(c))
                            print("\t\tException: " + ex)
                            

                        #   STEP 2.8: If first iteration of test then add data
                        if (bFirst):
                            lTmp = []
                            lTmp.append(0)                              #0 - Count
                            lTmp.append(0)                              #1 - Time
                            lTmp.append(0)                              #2 - Iterations
                            lTmp.append(0)                              #3 - Accuracy

                            _lData.append(lTmp)

                            sTmp = ":" + str(a) + ":"
                            sTmp += str(b) + ":"
                            sTmp += str(c) + ":\n"
                            fFile.write(sTmp)

                        if (iIterations >= 0):
                            #   STEP 2.9: If training failed say so
                            _lData[iCount][0] += 1
                            
                            dTmp = dt.datetime.now() - iTime
                            dTmp = int(dTmp.total_seconds()*10000000)

                            _lData[iCount][1] += dTmp
                            _lData[iCount][2] += iIterations
                            _lData[iCount][3] += iAccuracy

                        #   STEP 21: Increase counter
                    """
                    iCount = iCount + 1

        if (fFile != None):
            fFile.close()
            fFile = None

        return _lData

    def __averageData(self, _lData: list, _iIterations: int) -> list:
        if (len(_lData) > 0):
            print("\tDAVID - Averaging Data")
            for i in range(0, len(_lData)):
                for j in range(1, len(_lData[i])):
                    _lData[i][j] = float(_lData[i][j]) / _iIterations

                    if (_lData[i][j] > 0):
                        _lData[i][j] = int(_lData[i][j])

                _lData[i][0] = int(_lData[i][0])

        return _lData

    def __userInput(self) -> None:
        global teUInputEvent

        if (teUInputEvent.is_set() == True):
            print("waiting")
            teUInputEvent.wait()

        global sUserInput

        sUserInput = input("")
        teUInputEvent.set()
        print(
            "\tDAVID - Input received. Please wait while the data acquisition finishes"
        )

    def __saveData(self, _lData: list, _sFileName: str,
                   _iIterations: int) -> None:
        if (len(_lData) > 0):
            print("\tDAVID - Saving Data")

            #   STEP 0: Local variables
            fFileOut = None

            try:
                #   STEP 0.1: Some more variables
                sFile = os.path.abspath(
                    ".") + "\\Helpers\\Testing\\" + _sFileName + "_data.txt"
                sTmp = ""

                #   STEP 1: Create file
                fFileOut = open(sFile, "a")
                fFileOut.close()
                fFileOut = None

                #   STEP 2: Write to file
                fFileOut = open(sFile, "r+")

                #   STEP 3: Loop through the list and write it to the file
                sTmp = "Iterations=" + str(
                    _iIterations
                ) + "|Params=[Failures, Time, Iterations, Accuracy]\n"
                fFileOut.write(sTmp)

                for i in range(0, len(_lData)):
                    sTmp = ":"
                    if (len(_lData[i]) > 1):
                        for j in range(0, len(_lData[i])):
                            sTmp = sTmp + str(_lData[i][j]) + ":"

                    else:
                        sTmp = sTmp + str(_lData[i]) + ":"

                    fFileOut.write(sTmp + "\n")

            except:
                print("An error occured in Helpers.DAVID->savData()")

            finally:
                if (fFileOut != None):
                    fFileOut.close()
                    fFileOut = None

                print("\tDAVID - Data Acquisition completed\n")
                return
# Beispiel #24 (scraper artifact: example separator pasted from an
# aggregation site - not valid Python; original marker: "Beispiel #24" / "0")
    def __init__(self, _iParticles: int) -> None:
        """
            Description:

                Initializes the swarm container: loads the SwarmChan config
                file, sets the per-algorithm state flags, and creates
                _iParticles UwU particle instances.

            NOTE(review): this def sits outside any visible class (scraper
                artifact) and duplicates SwarmChan.__init__ defined further
                down in this file.

            Args:

                + _iParticles   = ( int ) The number of particles in the swarm
        """

        #region STEP 0: Local variables

        self.__enum = en.SwarmChan
        self.__cf = Conny()
        self.__cf.load(self.__enum.value)

        #endregion

        #region STEP 1: Private variables

        #   region STEP 1.1: Particles

        self.__iParticles = _iParticles

        #   endregion

        #   region STEP 1.2: Init flags

        #   One four-stage init flag list per supported algorithm
        self.__bPsoState = [False, False, False, False]
        self.__bBeeState = [False, False, False, False]
        self.__bNemState = [False, False, False, False]

        #   endregion

        #   region STEP 1.3: bools

        #   Whether testing helpers may run (read from the config file)
        self.__bAllowTesting = self.__cf.data["parameters"]["allow testing"]

        #   endregion

        #   region STEP 1.4: Other

        self.__data = [None]

        #   endregion

        #endregion

        #region STEP 2: Public variables

        #   region STEP 2.1: Paricles

        self.lParticles = []

        #   Best-so-far solution (position, fitness, and owning index);
        #   fitness starts at +inf, i.e. fitness is minimized
        self.lBestSolution = None
        self.fBestSolution = np.inf
        self.iBestSolution = 0

        #   endregion

        #   region STEP 2.2: PSO

        self.psoPhi1 = 0.0
        self.psoPhi2 = 0.0
        self.psoN = 0.0
        self.psoX = 0.0

        #   endregion

        #   region STEP 2.3: BEE

        #idk

        #   endregion

        #   region STEP 2.4: Nelder-Mead

        self.NM_Alpha = None
        self.NM_Beta = None
        self.NM_Gamma = None
        self.NM_Sigma = None

        self.NM_State = None
        self.NM_lGetFitness = None

        #
        #   endregion

        #   region STEP 2.5: Bools

        #   Whether progress is printed to stdout (read from the config file)
        self.bShowOutPut = self.__cf.data["parameters"]["show output"]

        #   endregion

        #endregion

        #region STEP 3: Setup - Private variables

        #endregion

        #region STEP 4: Setup - Public variables

        #   region STEP 4.1: Particles

        for _ in range(0, _iParticles):
            self.lParticles.append(UwU())

        #   endregion

        #endregion

        return
# Beispiel #25 (scraper artifact: example separator pasted from an
# aggregation site - not valid Python; original marker: "Beispiel #25" / "0")
class SwarmChan:

    #region Init
    """
        Description:

            This class is very kawaii :3
        
        |\n
        |\n
        |\n
        |\n
        |\n
        
        Parameters:

            :param _iParticles: = ( int ) The number of particles in the swarm

        Returns:

            :return: >> (None)
    """
    def __init__(self, _iParticles: int) -> None:
        """
            Description:

                Builds the swarm: loads the class configuration, resets the
                per-algorithm state flags and creates _iParticles particles.

            |\n

            Parameters:

                :param _iParticles: = ( int ) The number of particles in the
                    swarm

            Returns:

                :return: >> (None)
        """

        #   STEP 0: Load the class configuration from file
        self.__enum = en.SwarmChan
        self.__cf = Conny()
        self.__cf.load(self.__enum.value)

        #   STEP 1: Private variables
        self.__iParticles = _iParticles

        #   Init-state flags per algorithm:
        #   [positions set, fitness set, params set, initialized]
        self.__bPsoState = [False] * 4
        self.__bBeeState = [False] * 4
        self.__bNemState = [False] * 4

        self.__bAllowTesting = self.__cf.data["parameters"]["allow testing"]

        #   Slot 0 stores the swarm-type enum once __setSwarmType__ runs
        self.__data = [None]

        #   STEP 2: Public variables
        self.lParticles = []

        #   Best solution found so far (position, fitness, particle index)
        self.lBestSolution = None
        self.fBestSolution = np.inf
        self.iBestSolution = 0

        #   PSO coefficients - filled in by initPsoParams()
        self.psoPhi1 = 0.0
        self.psoPhi2 = 0.0
        self.psoN = 0.0
        self.psoX = 0.0

        #   Nelder-Mead coefficients - unset until configured
        self.NM_Alpha = None
        self.NM_Beta = None
        self.NM_Gamma = None
        self.NM_Sigma = None

        self.NM_State = None
        self.NM_lGetFitness = None

        #   Output flag ("OutPut" spelling kept so existing callers work)
        self.bShowOutPut = self.__cf.data["parameters"]["show output"]

        #   STEP 3: Create the particles
        for _ in range(0, _iParticles):
            self.lParticles.append(UwU())

        #   STEP 4: Return
        return

    #
    #endregion

    #region Front-End

    #   region Front-End: Sets

    def setParticlePositions(self, _lData: list) -> None:
        """
            - **Description**::

            Sets the current positions of all the particles
            
            |\n
            |\n
            |\n
            |\n
            |\n
            - **Parameters**::

                :param _lData: >> (list) The current position of each particle

            - **Return**::

                :return: >> (None)
        """

        #   STEP 1: Check that the data size is correct
        if (len(_lData) != self.__iParticles):
            raise Exception(
                "Error in Swarm.setParticlePositions: Wrong data size for position initialization"
            )

        #   STEP 2: Do the loopdy loop
        for i in range(0, self.__iParticles):
            self.lParticles[i].lCurrPosition = _lData[i]

        #   STEP 3: Return
        return

    def setParticleFitness(self, _lData: list) -> None:
        """
            - **Description**::

            Sets the current fitness of all the particles
            
            |\n
            |\n
            |\n
            |\n
            |\n
            - **Parameters**::

                :param _lData: >> (list) The current fitness of each particle

            - **Return**::

                :return: >> (None)
        """

        #   STEP 0: Local variables

        #   STEP 1: Setup - Local variables

        #   STEP 2: Check that the data size is correct
        if (len(_lData) != self.__iParticles):
            #   STEP 3: Error handling
            raise Exception(
                "An error occured in Swarm.setParticleFitness() -> Step 2: Wrong data size for fitness initialization"
            )

        #   STEP 4: Be safe OwO
        try:
            #   STEP 5: Do the loopdy loop
            for i in range(0, self.__iParticles):
                #   STEP 6: Check if fitness is list
                if (type(_lData[i]) == list):
                    #   STEP 7: Setup - Tmp variables
                    fSum = 0.0

                    #   STEP 8: Loop through list
                    for j in range(0, len(_lData[i])):
                        #   STEP 9: Sum fitness values
                        fSum += _lData[i][j]

                    #   STEP 10: Set particle fitness
                    self.lParticles[i].setFitness(fSum)

                #   STEP 11: Fitness isn't a list
                else:
                    #   STEP 12: Set particle fitness
                    self.lParticles[i].setFitness(_lData[i])

                #   STEP 13: Check if current particle fitness is new best
                if (self.lParticles[i].fFitness < self.fBestSolution):
                    #   STEP 14: Update - Best variables
                    self.iBestSolution = i
                    self.lBestSolution = self.lParticles[i].lCurrPosition

                    self.fBestSolution = self.lParticles[i].fFitness

        #   STEP 15: Oopsie daisie
        except Exception as ex:
            #   STEP 16: Whoopsy daisy
            print("Initial error: ", ex)
            raise Exception("Error in Swarm.setParticleFitness() -> Step 15")

        #   STEP 17: Return
        return

    #
    #   endregion

    #   region Front-End: Particle-Swarm-Optimization

    def pso(self) -> None:
        """
			Description:

                This function moves all the swarm particles according to the 
                PSO algorithm. C1 and C2 control the velocity and thus are
                called the accelration constants. Small C values allow the 
                particles to explore further away from gBest whereas larger
                C values encourage particles to search more intensively in
                regions close to gBest. R1 and R2 are the social factors and
                ensure that the algorithm is stochastic. X is the inertial 
                weight factor. A large X value causes the algorithm to search
                for a solution globally whereas a small X value allows the 
                algorithm to search local minima more thoroughly

			|\n
			|\n
			|\n
			|\n
			|\n
		"""

        #   STEP 0: Local variables
        #   STEP 1: Setup - Local variables

        #   STEP 2: Check if PSO is initialized
        if (self.__bPsoState[3] != True):
            #   STEP 3: Check if it should be
            if ((self.__bPsoState[0] == True) and (self.__bPsoState[1] == True)
                    and (self.__bPsoState[2] == True)):
                #   STEP 4: Set initialized flag and continue
                self.__bPsoState[3] = True

            else:
                #   STEP 5: Error handling
                raise Exception(
                    "An error occured in Swarm.psoSwarm() -> Step 3: PSO algorithm not initialized"
                )

        #   STEP 6: Loop through UwU
        for i in range(0, self.__iParticles):
            #   STEP 7: Get particle positions
            lPCurrTmp = self.lParticles[i].lCurrPosition
            lPBestTmp = self.lParticles[i].lBestPosition
            lPVelocity = self.lParticles[i].lVelocity

            #   STEP 8: Calculate velocity constant
            lAlpha1 = self.psoPhi1 * (np.array(lPBestTmp, dtype="object") -
                                      np.array(lPCurrTmp, dtype="object"))
            lAlpha2 = self.psoPhi2 * (
                np.array(self.lBestSolution, dtype="object") -
                np.array(lPCurrTmp, dtype="object"))
            lAlphaTmp = lAlpha1 + lAlpha2
            lAlphaTmp = lPVelocity + lAlphaTmp
            lBeta = self.psoX * (lAlphaTmp)

            #   STEP 9: Set the particels new velocity and position
            self.lParticles[i].lVelocity = lBeta
            self.lParticles[i].lCurrPosition = np.array(
                lPCurrTmp, dtype="object") + np.array(lBeta, dtype="object")

        return

    #       region Front-End-(Particle-Swarm-Optimization): Init

    def initPsoPositions(self, _lData: list) -> None:
        """
            - **Description**::

            Sets the current positions of all the particles for the PSO algorithm
            
            |\n
            |\n
            |\n
            |\n
            |\n
            - **Parameters**::

                :param _lData: >> (list) The current position of each particle

            - **Return**::

                :return: >> (None)
        """

        #   STEP 1: Be safe OwO
        try:
            #   STEP 2: Out-source the work =]
            self.setParticlePositions(_lData)

            #   STEP 3: Init Velocities
            for i in range(0, self.__iParticles):

                #   STEP 4: Create velocity
                lVel = []

                #   STEP 5: Loop through list
                for j in range(0, len(_lData[i])):
                    #   STEP 6: Check if 2D list
                    if (type(_lData[i][j]) == list):
                        #   STEP 7: Append zeros to velocity
                        lVel.append(np.zeros(len(_lData[i][j])))

                    #   STEP 8: Not a list
                    else:
                        #   STEP 9: Append zero to velocity
                        lVel.append(0.0)

                #   STEP 10: Set velocity
                self.lParticles[i].lVelocity = lVel

        except Exception as ex:
            #   STEP 11: Whoopsy daisy
            print("Initial error: ", ex)
            raise Exception("Error in Swarm.psoInitParticlePositions()")

        #   STEP 12: Set PSO positions flag to True
        self.__bPsoState[0] = True

        #   STEP 13: Return
        return

    def initPsoFitness(self, _lData: list) -> None:
        """
            - **Description**::

            Sets the current fitness of all the particles for the PSO algorithm
            
            |\n
            |\n
            |\n
            |\n
            |\n
            - **Parameters**::

                :param _lData: >> (list) The current fitness of each particle

            - **Return**::

                :return: >> (None)
        """

        #   STEP 1: Be safe OwO
        try:
            #   STEP 2: Out-source the work =]
            self.setParticleFitness(_lData)

        except Exception as ex:
            #   STEP 2.?: Whoopsy daisy
            print("Initial erro: ", ex)
            raise Exception(
                "Error in Swarm.psoInitParticleFitness: Wrong data size for position initialization"
            )

        #   STEP TODO: Set PSO fitness flag to True
        self.__bPsoState[1] = True

        #   STEP TODO + 1: Return
        return

    def initPsoParams(self, _fPhi1: float, _fPhi2: float, _fN: float) -> None:
        """
            - **Description**::

            Sets the parameters for the PSO algorithm
            
            |\n
            |\n
            |\n
            |\n
            |\n
            - **Parameters**::

                :param _fC1: >> (float) Acceleration Constant 1
                :param _fR1: >> (float) Social Factor 1
                :param _fC2: >> (float) Acceleration Constant 2
                :param _fR2: >> (float) Social Factor 2
                :param _fN: >> (float) exploitation co-effiecient

            - **Return**::

                :return: >> (None)
        """

        #   STEP 1: Set the algorithm parameters
        self.psoPhi1 = _fPhi1
        self.psoPhi2 = _fPhi2
        self.psoN = _fN

        #   STEP 2: Update PSO algorithm parameters
        if (self.updatePsoParams()):

            #   STEP 3: Set PSO parameters flag to True
            self.__bPsoState[2] = True

        #   STEP 4: Return
        return

    def updatePsoParams(self) -> bool:
        """
            - **Description**::

            Updates the parameters for the PSO algorithm
            
            |\n
            |\n
            |\n
            |\n
            |\n
        """

        #   STEP 2: Get total phi
        fPhi = self.psoPhi1 + self.psoPhi2

        #   STEP 3: Return and set X
        if (fPhi > 4.0):
            self.psoX = 2.0 * self.psoN / abs(2.0 - fPhi - np.sqrt(fPhi *
                                                                   (fPhi - 4)))
            return True

        else:
            return False

    #
    #       endregion

    #
    #   endregion

    #   region Front-End: Bee-Colony-Optimization

    def bee(self) -> None:
        """
            Description:

                This funciton moves all the bee particles according to my
                revised ABC algorithm.

            |\n
            |\n
            |\n
            |\n
            |\n

            Variables:

                ~ bUseRegionFitness = ( bool ) Whether ot not a worker bee
                    should get the fitness of multiple samples within a 
                    region or not

                ~ bEvaluatorsAsScouts   = ( bool ) Whether or not to use
                    evaluator bees as scouts while the bees they employ are
                    out

                ~ bEvaluatorMemory  = ( bool ) Whether or not the evaluator
                    bees should have decaying memory

                ~ bHiveMovement = ( bool ) Whether or not the hive should be 
                    allowed to move to a more suitable location

                ~ bBirthDeath   = ( bool ) Whether or not the colony should be
                    allowed to grow if conditions are favorable

                ~ bWorkerDistraction    = ( bool ) Whether or not to allow 
                    worker bee distraction

                |\n

                ~ iNum_Workers  = ( int ) The number of worker bees in the
                    colony

                ~ iNum_Scounts  = ( int ) The number of scout bees in the 
                    colony

                ~ iNum_Evaluators   = ( int ) The number of evaluator bees in
                    the colony

                |\n

                ~ fSpeed_Worker = ( float ) The speed at which a worker bee
                    moves in the search space

                ~ fSpeed_Scout  = ( float ) The speed at which a scout bee
                    moves in the search space

                ~ fSpeed_Eval   = ( float ) The speed at which an evaluator
                    bee moves in the search space

                ~ fSpeed_Queen  = ( float ) The speed at which the queen bee
                    moves in the search space

                |\n

                ~ iSamples_Worker   = ( int ) The number of samples a worker
                    bee should collect in a region before returning to the hive

                ~ fRequiredFitness_WorkerDistractionStart   = ( float ) The
                    required minimum fitness of a candidate for a worker bee to
                    become distracted from its current task

                ~ iSamples_WorkerDistraction    = ( int ) A semi-random number
                    of how many samples a worker should take when distracted
                    before resuming its original task

                ~ fRequiredFitness_WorkderDistractionTot    = ( float ) The 
                    total fitness of the distraction region that is required
                    for the distraction to be useful

                |\n

                ~ fRatio_ScoutReturn    = ( float ) The ratio of worker bees
                    that need to have returned before the scout bees are 
                    signalled ot return

                ~ fRequiredFitness_Scout    = ( float ) The minimum viable
                    fitness of a candidate that is required for a scout to
                    return to the hive before being signalled to do so
        """

        #   STEP 0: Local variables
        #   STEP 1: Setup - Local variables

        #   STEP 2: Check if not initialized
        if (self.__bBeeState[3] != True):
            #   STEP 3: Check if should be initialized
            if ((self.__bBeeState[0]) and (self.__bBeeState[1])
                    and (self.__bBeeState[2])):
                #   STEP 4: Set init flag
                self.__bBeeState[3] = True

                #   STEP 5: Energize
                self.__initBee__()

            else:
                #   STEP 6: Error handling
                raise Exception(
                    "An error occured in SwarmChan.bee() -> Step 3: Bee-Colony required paramaters not initialized"
                )

        #   STEP 7: Perform evaluator actions
        self.__beeEvaluators__()

        #   STEP 10: Perform Queen bee actions
        self.__beeQueen__()

        #   STEP 8: Perform worker bee actions
        self.__beeWorkers__()

        #   STEP 9: Perform scout bee actions
        self.__beeScouts__()

        #   STEP ??: Return
        return

    #       region Front-End-(Bee-Colony-Optimization): Init

    def initBeePositions(self, **kwargs) -> None:
        """
            Description:

                Stores the starting information positions for the
                Bee-Colony-Optimization algorithm, locking the swarm type to
                Bee-Colony on first use.

            |\n

            Arguments:

                + positions = ( list ) List of starting information positions
                    ~ Required; if starting fitnesses were supplied first,
                    both lists must have the same length
        """

        #   STEP 1: Lock the swarm type to Bee-Colony
        if (self.__data[0] == None):
            self.__setSwarmType__(type=sw.BEE)

        elif (self.__data[0] != sw.BEE):
            raise Exception(
                "An error occured in SwarmChan.initBeePositions() -> Step 4: Incorrect swarm type"
            )

        #   STEP 2: The positions argument is mandatory
        if ("positions" not in kwargs):
            raise Exception(
                "An error occured in SwarmChan.initBeePositions() -> Step 6: No positions argument passed"
            )

        #   STEP 3: Cross-check against fitnesses supplied earlier, if any
        if ("starting fitnesses" in self.__data[1]):
            if (len(kwargs["positions"]) != len(
                    self.__data[1]["starting fitnesses"])):
                raise Exception(
                    "An error occured in SwarmChan.initBeePositions() -> Step 9: Position list length doesn't match fitness list length"
                )

        #   STEP 4: Store the positions and raise the init flag
        self.__data[1]["starting positions"] = kwargs["positions"]

        self.__bBeeState[0] = True

        #   STEP 5: Return
        return

    def initBeeFitness(self, **kwargs) -> None:
        """
            Description:

                Stores the starting fitnesses for the
                Bee-Colony-Optimization algorithm, locking the swarm type to
                Bee-Colony on first use.

            |\n

            Arguments:

                + fitness   = ( list ) The fitnesses of the starting positions
                    ~ Required; if starting positions were supplied first,
                    both lists must have the same length
        """

        #   STEP 1: Lock the swarm type to Bee-Colony
        if (self.__data[0] == None):
            self.__setSwarmType__(type=sw.BEE)

        elif (self.__data[0] != sw.BEE):
            raise Exception(
                "An error occured in SwarmChan.initBeeFitness() -> Step 4: Incorrect swarm type"
            )

        #   STEP 2: The fitness argument is mandatory
        if ("fitness" not in kwargs):
            raise Exception(
                "An error occured in SwarmChan.initBeeFitness() -> Step 6: No fitness argument passed"
            )

        #   STEP 3: Cross-check against positions supplied earlier, if any
        if ("starting positions" in self.__data[1]):
            if (len(kwargs["fitness"]) != len(
                    self.__data[1]["starting positions"])):
                raise Exception(
                    "An error occured in SwarmChan.initBeeFitness() -> Step 9: Fitness length doesn't match position list length"
                )

        #   STEP 4: Store the fitnesses and raise the init flag
        self.__data[1]["starting fitnesses"] = kwargs["fitness"]

        self.__bBeeState[1] = True

        #   STEP 5: Return
        return

    def initBeeParams(self, **kwargs) -> None:
        """
            Description:

                Saves the swarm's parameters until the point that they are
                used during the initialization of the
                Bee-Colony-Optimization algorithm.

            |\n

            Arguments:

                + params    = ( dict ) A dictionary containing the parameters
                    for the algorithm
                    ~ Required
        """

        #   STEP 1: Check if swarm type is uninitialized
        if (self.__data[0] == None):
            #   STEP 2: Init swarm to be of type Bee-Colony
            self.__setSwarmType__(type=sw.BEE)

        #   STEP 3: Swarm type is initialized - check if type is Bee-Colony
        elif (self.__data[0] != sw.BEE):
            #   STEP 4: Error handling
            raise Exception(
                "An error occured in SwarmChan.initBeeParams() -> Step 4: Incorrect swarm type"
            )

        #   STEP 5: Check if params arg passed
        if ("params" not in kwargs):
            #   STEP 6: Error handling
            #   BUG FIX: message previously read "No pamars argument passed"
            raise Exception(
                "An error occured in SwarmChan.initBeeParams() -> Step 6: No params argument passed"
            )

        #   STEP 7: Set parameters
        self.__data[1]["parameters"] = kwargs["params"]

        #   STEP 8: Set init flag
        self.__bBeeState[2] = True

        #   STEP 9: Return
        return

    #
    #       endregion

    #
    #   endregion

    #
    #endregion

    #region Back-End

    #   region Back-End: Sets

    def __setSwarmType__(self, **kwargs) -> None:
        """
            Description:

                Sets the type of swarm for this instance and, for the
                Bee-Colony type, appends the two extra data dictionaries the
                algorithm needs (config storage and colony storage).

            |\n

            Arguments:

                + type  = ( enum ) The type of swarm
                    ~ Required
        """

        #   STEP 1: The type argument is mandatory
        if ("type" not in kwargs):
            raise Exception(
                "An error occured in SwarmChan.__setSwarmType__() -> Step 2: No type argument passed"
            )

        #   STEP 2: The argument must be a swarm enum
        if (sw.isEnum(kwargs["type"]) == False):
            raise Exception(
                "An error occured in SwarmChan.__setSwarmType__() -> Step 4: Invalid enum passed"
            )

        #   STEP 3: Store the swarm type
        self.__data[0] = kwargs["type"]

        #   STEP 4: Bee-Colony needs two extra data dictionaries
        if (kwargs["type"] == sw.BEE):
            self.__data.append({})
            self.__data.append({})

        #   STEP 5: Return
        return

    #
    #   endregion

    #   region Back-End: Inits

    def __initBee__(self) -> None:
        """
            Description:

                Initializes the Bee-Colony using the saved algorithm
                parameters: generates the queen, worker, scout and evaluator
                bees, then randomly splits the workers, the scouts and the
                starting information between the evaluators.

            |\n

            Pseudo:

                generate queen
                generate workers
                generate scouts
                generate evaluators

                split workers evenly between evaluators
                split scouts evenly between evaluators

                split starting information evenly between evaluators
        """

        #   STEP 0: Local variables
        dParams = self.__data[1]["parameters"]

        lPositions = self.__data[1]["starting positions"]
        lFitness = self.__data[1]["starting fitnesses"]

        lHivePos = dParams["hive position"]

        #   STEP 1: Generate the queen at the hive position
        uQueen = UwU()
        uQueen.lCurrPosition = lHivePos

        self.__data[2]["queen"] = uQueen

        #   STEP 2: Generate the workers
        #   BUG FIX: the original code read "disctraction threshold" and
        #   "disctraction offset" but also "distraction offset", which raised
        #   a KeyError unless the caller supplied both misspellings. Accept
        #   either spelling, preferring the correct one, and read each value
        #   once.
        dWorkerParams = dParams["worker"]

        fThreshold = dWorkerParams.get(
            "distraction threshold",
            dWorkerParams.get("disctraction threshold"))
        fOffset = dWorkerParams.get(
            "distraction offset", dWorkerParams.get("disctraction offset"))

        lWorkers = []

        for _ in range(0, dParams["num workers"]):
            uWorker = UwU()
            uWorker.lCurrPosition = lHivePos

            uWorker.data["evaluator"] = None
            uWorker.data["state"] = "reporting"
            uWorker.data["destination"] = None

            #   Randomize each worker's distraction threshold within
            #   [threshold - offset, threshold + offset)
            uWorker.data["distraction threshold"] = (
                fThreshold + rn.random() * fOffset * 2.0 - fOffset)

            uWorker.data["memRoute"] = {
                "items": 0,
                "positions": [],
                "fitness": [],
            }

            uWorker.data["memRegion"] = {
                "items": 0,
                "positions": [],
                "fitness": []
            }

            uWorker.data["memDistraction"] = {
                "items": 0,
                "positions": [],
                "fitness": []
            }

            lWorkers.append(uWorker)

        self.__data[2]["workers"] = lWorkers

        #   STEP 3: Generate the scouts
        lScouts = []

        for _ in range(0, dParams["num scouts"]):
            uScout = UwU()
            uScout.lCurrPosition = lHivePos

            uScout.data["evaluator"] = None
            uScout.data["state"] = "reporting"
            uScout.data["destination"] = None

            uScout.data["mem"] = {"items": 0, "positions": [], "fitness": []}

            lScouts.append(uScout)

        self.__data[2]["scouts"] = lScouts

        #   STEP 4: Generate the evaluators
        lEvals = []

        for _ in range(0, dParams["num evals"]):
            uEval = UwU()
            uEval.lCurrPosition = lHivePos

            uEval.data["workers"] = []
            uEval.data["scouts"] = []

            uEval.data["memory"] = {
                "items": 0,
                "positions": [],
                "fitness": [],
                "age": []
            }

            uEval.data["state"] = "starting"
            uEval.data["destination"] = None

            #   BUG FIX: this counter key was previously spelled "itmes";
            #   every other memory dictionary in this class uses "items"
            uEval.data["mem scout"] = {
                "items": 0,
                "positions": [],
                "fitness": [],
            }

            uEval.data["best"] = {"position": None, "fitness": None}

            lEvals.append(uEval)

        self.__data[2]["evaluators"] = lEvals

        #   NOTE(review): if the head-room caps below round down too far the
        #   placement loops can spin forever - pre-existing behavior; confirm
        #   the worker/scout/eval counts keep the caps attainable.

        #   STEP 5: Randomly split the workers between the evaluators, with
        #   10% head-room per evaluator
        iMaxWorkers = int(1.1 * dParams["num workers"] / dParams["num evals"])

        for i in range(0, dParams["num workers"]):
            while (True):
                iIndex = rn.randint(0, dParams["num evals"] - 1)

                if (len(lEvals[iIndex].data["workers"]) < iMaxWorkers):
                    lEvals[iIndex].data["workers"].append(i)
                    lWorkers[i].data["evaluator"] = iIndex

                    break

        #   STEP 6: Randomly split the scouts between the evaluators, with
        #   20% head-room per evaluator
        iMaxScouts = int(1.2 * dParams["num scouts"] / dParams["num evals"])

        for i in range(0, dParams["num scouts"]):
            while (True):
                iIndex = rn.randint(0, dParams["num evals"] - 1)

                if (len(lEvals[iIndex].data["scouts"]) < iMaxScouts):
                    lEvals[iIndex].data["scouts"].append(i)
                    lScouts[i].data["evaluator"] = iIndex

                    break

        #   STEP 7: Randomly split the starting information between the
        #   evaluators, with 20% head-room per evaluator
        iMaxInfo = int(1.2 * len(lPositions) / dParams["num evals"])

        for i in range(0, len(lPositions)):
            while (True):
                iIndex = rn.randint(0, dParams["num evals"] - 1)

                dMem = lEvals[iIndex].data["memory"]

                if (dMem["items"] < iMaxInfo):
                    dMem["positions"].append(lPositions[i])
                    dMem["fitness"].append(lFitness[i])
                    dMem["age"].append(0)

                    dMem["items"] += 1

                    break

        #   STEP 8: Return
        return

    #
    #   endregion

    #   region Back-End: Bee-Colony-Optimization

    #       region Back-End-(Bee-Colony-Optimization): Sets

    def __setBeeWorkerDest__(self, **kwargs) -> None:
        """
        """

        #   STEP 0: Local variables
        #   STEP 1: Setup - Local variables

        #   STEP 2: ??

        #   STEP ??: Return
        return

    def __setBeeScoutDest__(self, **kwargs) -> None:
        """
        """

        #   STEP 0: Local variables
        #   STEP 1: Setup - Local variables

        #   STEP 2: ??

        #   STEP ??: Return
        return

    def __setBeeEvalDest__(self) -> None:
        """
        """

        #   STEP 0: Local variables
        #   STEP 1: Setup - Local variables

        #   STEP 2: ??

        #   STEP ??: Return
        return

    #
    #       endregion

    #       region Back-End-(Bee-Colony-Optimization): Gets

    def __getBeeEvalMem__(self, **kwargs) -> None:
        """
        """

        #   STEP 0: Local variables
        #   STEP 1: Setup - Local variables

        #   STEP 2: ??

        #   STEP ??: Return
        return

    def __getBeeEmployeeState__(self, **kwargs) -> bool:
        """
            Description:

                Returns true if all the employees of the specified evaluator
                bee are currently in a "reporting" state.

            |\n
            |\n
            |\n
            |\n
            |\n

            Arguments:

                + evaluator = ( int ) The index of the evaluator whoese
                    employees should be checked
                    ~ Required

            |\n

            Returns:

                + bPresent  = ( bool ) A flag that represents whether or not
                    all this evalutor's employees are currently in a 
                    "reporting" state.
        """

        #   STEP 0: Local variables
        lWorkers = None
        lScouts = None

        uEval = None

        #   STEP 1: Setup - Local variables

        #   STEP 2: Check if evaluator arg passed
        if ("evaluator" not in kwargs):
            #   STEP 3: Error handling
            raise Exception(
                "An error occured in SwarmChan.__getBeeEmployeeState__() -> Step 2: No evaluator argument passed"
            )

        else:
            #   STEP 4: Init local vars
            lWorkers = self.__data[2]["workers"]
            lScouts = self.__data[2]["scouts"]

            uEval = self.__data[2]["evaluators"][kwargs["evaluator"]]

        #   STEP 5: Iterate through evaluator's worker bees
        for i in range(0, len(uEval.data["workers"])):
            #   STEP 6: Get worker index
            iWorker = uEval.data["workers"][i]

            #   STEP 7: Check if worker is reporting
            if (lWorkers[iWorker].data["state"] != "reporting"):
                #   STEP 8: Return
                return False

        #   STEP 9: Iterate through evaluator's scout bees
        for i in range(0, len(uEval.data["scouts"])):
            #   STEP 10: Get scout index
            iScout = uEval.data["scouts"][i]

            #   STEP 11: Check if scout is not reporting
            if (lScouts[iScout].data["state"] != "reporting"):
                #   STEP 12: Retur of the jedi
                return False

        #   STEP 13: Return of more jedii
        return True

    #
    #       endregion

    #       region Back-End-(Bee-Colony-Optimization): Moves

    def __moveBeeEval__(self, **kwargs) -> None:
        """
        """

        #   STEP 0: Local variables
        #   STEP 1: Setup - Local variables

        #   STEP 2: ??

        #   STEP ??: Return
        return

    def __moveBeeQueen__(self) -> None:
        """
        """

        #   STEP 0: Local variables
        #   STEP 1: Setup - Local variables

        #   STEP 2: ??

        #   STEP ??: Return
        return

    def __moveBeeScout__(self, **kwargs) -> None:
        """
        """

        #   STEP 0: Local variables
        #   STEP 1: Setup - Local variables

        #   STEP 2: ??

        #   STEP ??: Return
        return

    def __moveBeeWorker__(self, **kwargs) -> None:
        """
        """

        #   STEP 0: Local variables
        #   STEP 1: Setup - Local variables

        #   STEP 2: ??

        #   STEP ??: Return
        return

    #
    #       endregion

    #       region Back-End-(Bee-Colony-Optimization): Bees

    def __beeEvaluators__(self) -> None:
        """
            Description:

                Performs the actions of the evaluator bees for this
                iteration.

            |\n
            |\n
            |\n
            |\n
            |\n

            Pseudo:

                for every evaluator
                    if evaluator at the hive ("starting" or "waiting")
                        if "waiting": require all employees reporting
                        once everyone is in:
                            gather employee memory
                            re-dispatch workers and scouts
                    else (in the field)
                        head for base once all employees report,
                        otherwise keep scouting
        """

        #   STEP 0: Local variables
        lEvaluators = self.__data[2]["evaluators"]

        bPresent = None

        #   STEP 1: Setup - Local variables

        #   STEP 2->3: Iterate through evaluators
        for i, uEval in enumerate(lEvaluators):
            #   STEP 4: Evaluator currently at the hive?
            if (uEval.data["state"] in ("starting", "waiting")):
                #   STEP 5->7: A waiting evaluator needs every employee back;
                #   a starting one is trivially ready
                if (uEval.data["state"] == "waiting"):
                    bPresent = self.__getBeeEmployeeState__(evaluator=i)
                else:
                    bPresent = True

                #   STEP 12: Skip this evaluator until everyone has reported
                if (bPresent != True):
                    continue

                #   STEP 8->11: Absorb reports, then send everyone back out
                self.__getBeeEvalMem__(evaluator=i)
                self.__setBeeWorkerDest__(evaluator=i)
                self.__setBeeScoutDest__(evaluator=i)

            else:
                #   STEP 13->15: In the field - return home once all
                #   employees report, otherwise keep scouting
                if (self.__getBeeEmployeeState__(evaluator=i) == True):
                    self.__moveBeeEval__(destination="base")
                else:
                    self.__moveBeeEval__(destination="scout")

        #   STEP 16: Return
        return

    def __beeQueen__(self) -> None:
        """
            Description:

                Performs the actions of the queen bee for this iteration.
                Each optional colony-level process runs only when its flag is
                enabled in the swarm parameters:

                    "evals as scouts"   ->  __setBeeEvalDest__()
                    "birth death"       ->  __beeBirthDeath__()
                    "hive movement"     ->  __moveBeeQueen__()
        """

        #   STEP 0: Local variables
        dParams = self.__data[1]["parameters"]

        #   STEP 1: Setup - Local variables
        #   Ordered (flag, action) table; order matches the original
        #   eval-destinations -> birth/death -> hive-movement sequence
        lTasks = [
            ("evals as scouts", self.__setBeeEvalDest__),
            ("birth death", self.__beeBirthDeath__),
            ("hive movement", self.__moveBeeQueen__),
        ]

        #   STEP 2->7: Run every enabled process
        for sFlag, fnProcess in lTasks:
            if (dParams[sFlag] == True):
                fnProcess()

        #   STEP 8: Return
        return

    def __beeScouts__(self) -> None:
        """
            Description:

                Performs the actions of the scout bees for this iteration of
                the algorithm: every scout that is not currently reporting
                back at the hive is moved along its route.

            |\n
            |\n
            |\n
            |\n
            |\n

            Pseudo:

                for all scouts
                    if scout not reporting
                        __moveBeeScout__(scout)
        """

        #   STEP 0: Local variables
        lScouts = self.__data[2]["scouts"]

        #   STEP 1: Setup - Local variables

        #   STEP 2: Iterate through scouts (the mover is handed the index)
        for i, uScout in enumerate(lScouts):
            #   STEP 3: Check that scout not reporting
            if (uScout.data["state"] != "reporting"):
                #   STEP 4: Move scout
                self.__moveBeeScout__(scout=i)

        #   STEP 5: Return
        return

    def __beeWorkers__(self) -> None:
        """
            Description:

                Performs the actions of the worker bees for this iteration:
                every worker that is not currently reporting back at the hive
                is moved along its route.

            |\n
            |\n
            |\n
            |\n
            |\n

            Pseudo:

                for all workers
                    if worker is moving
                        __moveBeeWorker__(worker)
        """

        #   STEP 0: Local variables
        lWorkers = self.__data[2]["workers"]

        #   STEP 1: Setup - Local variables

        #   STEP 2: Iterate through workers (the mover is handed the index)
        for i, uWorker in enumerate(lWorkers):
            #   STEP 3: If worker not reporting
            if (uWorker.data["state"] != "reporting"):
                #   STEP 4: Move worker bee
                self.__moveBeeWorker__(worker=i)

        #   STEP 5: Return
        return

    #
    #       endregion

    #       region Back-End-(Bee-Colony-Optimization): Other

    def __beeBirthDeath__(self) -> None:
        """
        """

        #   STEP 0: Local variables
        #   STEP 1: Setup - Local variables

        #   STEP 2: ??

        #   STEP ??: Return
        return
# Beispiel #26
# 0
class Garry:

    #region Init
    """
        Description:

            Garry is a population container for population-based
            optimization.  It wraps a list of candidate solutions (UwU
            instances), tracks the best candidate seen so far, and holds the
            bookkeeping state used by the trust-region optimization (tro)
            front-end.

            Configuration (testing / output flags) is loaded via Conny from
            the file named by the en.Garry enum value.
    """
    def __init__(self, _iPopSize: int) -> None:

        #region STEP 0: Local variables

        #   Load this class' config file through Conny
        self.__enum = en.Garry
        self.__cf = Conny()
        self.__cf.load(self.__enum.value)

        #endregion

        #region STEP 1: Private variables

        #   region STEP 1.1: Pop size

        #   Required number of candidates for setPopulation()/setFitness()
        self.__iPopSize = _iPopSize

        #   endregion

        #   region STEP 1.2: Init flags

        #   TRO init flags: [0] = initTroParticles() done,
        #   [1] = initTroFitness() done, [2] = tro() fully initialized.
        #   NOTE(review): initTroParams() also writes slot [1] - see the
        #   note in that method
        self.__bTroState = [False, False, False]

        #   endregion

        #   region STEP 1.3: Bools

        #   Config flag: is testing allowed for this class?
        self.__bAllowTesting = self.__cf.data["parameters"]["allow testing"]

        #   endregion

        #endregion

        #region STEP 2: Public variables

        #   region STEP 2.1: Population

        #   List of UwU candidate instances
        self.lPopulation = []

        #   Best candidate so far: instance, fitness (lower is better,
        #   starts at +inf) and index into lPopulation
        self.vBestSolution = None
        self.fBestSolution = np.inf
        self.iBestSolution = 0

        #   endregion

        #   region STEP 2.2: TRO

        #   Trust-region size and the [best candidate, best fitness] pair
        self.iTroRegion = None
        self.lTroBest = None

        #   endregion

        #   region STEP 2.3: Bools

        #   Config flag: should progress be printed to stdout?
        self.bShowOutput = self.__cf.data["parameters"]["show output"]

        #   endregion

        #endregion

        #region STEP 3: Setup - Private variables

        #endregion

        #region STEP 4: Setup - Public variables

        #endregion

        return

    #
    #endregion

    #region Front-End

    #   region Front-End: Sets

    def setPopulation(self, **kwargs) -> None:
        """
            Description:

                Initializes the population using the provided list of
                candidate positions.  Each position is wrapped in a new UwU
                candidate and appended to self.lPopulation; any existing
                population is discarded first.

            |\n
            |\n
            |\n
            |\n
            |\n

            Args:
            
                + candidates    = ( list ) List of candidate positions; must
                    contain exactly the population size given at construction
                    ~ Required

            Raises:

                + Exception = if no candidate list is passed, or its length
                    does not match the configured population size
        """

        #   STEP 0: Local variables
        candidates = None

        #   STEP 1: Setup - Local variables

        #   STEP 2: Check if candidate
        if ("candidates" not in kwargs):
            #   STEP 3: Error handling
            raise Exception(
                "An error occured in Garry.setPopulation() -> Step 2: No candidate list passed"
            )

        #   STEP 4: Check that candidate list is right length
        if (len(kwargs["candidates"]) != self.__iPopSize):
            #   STEP 5: Error handling
            raise Exception(
                "An error occured in Garry.setPopulation() -> STEp 4: Incorrect candidate list length passed"
            )

        #   STEP 6: Set the class population
        if (len(self.lPopulation) > 0):
            #   STEP 7: Reset class candidate list
            self.lPopulation = []

        #   STEP 8: Init temp variable
        candidates = kwargs["candidates"]

        #   STEP 9: Iterate through candidates
        for i in range(0, len(candidates)):
            #   STEP 10: Create new candidate
            candy = UwU()

            #   STEP 11: Populate new candidate data
            candy.lCurrPosition = candidates[i]

            #   STEP 12: Append new candidate to population list
            self.lPopulation.append(candy)

        #   STEP 13: Return
        return

    def setFitness(self, **kwargs) -> None:
        """
            Description:

                Sets the fitness for the current population.  A fitness entry
                that is itself a list is summed into a single scalar.  The
                best-solution trackers (fBestSolution / vBestSolution /
                iBestSolution) are updated whenever a lower (better) fitness
                is seen.

            |\n
            |\n
            |\n
            |\n
            |\n

            Args:

                + fitness   = ( list ) Fitness values for the population; one
                    entry per candidate
                    ~ Required

            Raises:

                + Exception = if no fitness list is passed, or the fitness
                    list / population / configured size lengths mismatch
        """

        #   STEP 0: Local variables
        fitness = None

        #   STEP 1: Setup - Local variables

        #   STEP 2: Check if fitness passed
        if ("fitness" not in kwargs):
            #   STEP 3: Error handling
            raise Exception(
                "An error occured in Garry.setFitness() -> Step 2: No fitness list passed"
            )

        #   STEP 4: Check fitness list length
        if not ((len(kwargs["fitness"]) == len(self.lPopulation)) and
                (len(self.lPopulation) == self.__iPopSize)):
            #   STEP 5: Error handling
            raise Exception(
                "An error occured in Garry.setFitness() -> Step 4: List length mismatch"
            )

        #   STEP 5: Set local variable
        fitness = kwargs["fitness"]

        #   STEP 6: Iterate through fitness list
        for i in range(0, len(fitness)):
            #   STEP 7: Check if fitness is list
            if (type(fitness[i]) == list):
                #   STEP 8: Setup - Temp vars
                fTmp_TotalFitness = 0.0

                #   STEP 9: Iterate through list
                for j in range(0, len(fitness[i])):
                    #   STEP 10: Sum
                    fTmp_TotalFitness += fitness[i][j]

                #   STEP 11: Set pop fitness
                self.lPopulation[i].fFitness = fTmp_TotalFitness

            #   STEP 12: Not list
            else:
                #   STEP 13: Set pop fitness
                self.lPopulation[i].fFitness = fitness[i]

            #   STEP 14: Check if new best fitness (lower is better)
            if (self.lPopulation[i].fFitness < self.fBestSolution):
                #   STEP 15: Set best candidate variables
                self.fBestSolution = self.lPopulation[i].fFitness
                self.vBestSolution = self.lPopulation[i]
                self.iBestSolution = i

        #   STEP 16: Return
        return

    #
    #   endregion

    #   region Front-End: Trust-Region-Optimization

    def tro(self) -> list:
        """
            Description:

                Performs one trust-region update: if the current population's
                best fitness improves on the stored TRO best, the stored
                [candidate, fitness] pair is replaced and the trust region
                grows by one; otherwise the region shrinks by one.  The
                population and best-solution trackers are then reset for the
                next round.

                Requires initTroParticles() and initTroFitness() to have been
                called first.

            NOTE(review): the return annotation says list, but the method
                always returns None (bare return) - confirm the intended
                interface.

            Raises:

                + Exception = if the TRO state flags show the algorithm was
                    never initialized
        """

        #   STEP 0: Local variables
        #   STEP 1: Setup - Local variables

        #   STEP 2: Check if initialized
        if not (self.__bTroState[2]):
            #   STEP 3: Check if should be initialized
            if ((self.__bTroState[0]) and (self.__bTroState[1])):
                #   STEP 4: Set initialized flag
                self.__bTroState[2] = True

            else:
                #   STEP 5: Error handling
                raise Exception(
                    "An error occured in Garry.tro() -> Step 3: Trust-region optimization algorithm not initialized"
                )

        #   STEP 6: Check if new fittest solution is better than previous solution
        if (self.fBestSolution < self.lTroBest[1]):
            #   STEP 7: Adjust tro best candidate
            self.lTroBest[0] = self.vBestSolution
            self.lTroBest[1] = self.fBestSolution

            #   STEP 8: Increase trust region
            self.iTroRegion += 1

        else:
            #   STEP 9: Decrease trust region
            self.iTroRegion -= 1

        #   STEP 10: Clear irrelevant data (lTroBest keeps its own reference
        #   to the best candidate, so clearing vBestSolution is safe)
        self.lPopulation = []
        self.vBestSolution = None
        self.fBestSolution = np.inf
        self.iBestSolution = 0

        #   STEP 11: Return
        return

    #       region Front-End-(Trust-Region-Optimization): Init

    def initTroParticles(self, **kwargs) -> None:
        """
            Description:

                Initializes the initial candidate for the trust-region
                optimization process and marks the particle-init flag.

            |\n
            |\n
            |\n
            |\n
            |\n

            Args:
            
                + candidates    = ( list ) List containing exactly ONE
                    candidate position
                    ~ Required

            Raises:

                + Exception = if no candidate list is passed, or the list
                    does not hold exactly one candidate
        """

        #   STEP 0: Local variables
        #   STEP 1: Setup - Local variables

        #   STEP 2: Check if candidates passed
        if ("candidates" not in kwargs):
            #   STEP 3: Error handling
            raise Exception(
                "An error occured in Garry.initTroParticles() -> Step 2: No candidate list"
            )

        #   STEP 4: Check that exactly one candidate was passed
        if (len(kwargs["candidates"]) != 1):
            #   STEP 5: Error handling
            raise Exception(
                "An error occured in Garry.initTroParticles() -> Step 4: Invalid candidate list passed"
            )

        #   STEP 8: Create new candidate
        pop = UwU()

        #   STEP 9: Set candidate data
        pop.lCurrPosition = kwargs["candidates"][0]

        #   STEP 10: Append to class population list
        self.lPopulation.append(pop)

        #   STEP 11: Set best solution
        self.vBestSolution = self.lPopulation[0]
        self.iBestSolution = 0

        #   STEP 12: Init tro best variables ([candidate, fitness])
        if (self.lTroBest == None):
            self.lTroBest = [None, None]

        self.lTroBest[0] = self.vBestSolution

        #   STEP 13: Set init flag (slot 0 = particles initialized)
        self.__bTroState[0] = True

        #   STEP 14: Return
        return

    def initTroFitness(self, **kwargs) -> None:
        """
            Description:

                Sets the fitness of the single starting candidate and marks
                the fitness-init flag.  A fitness entry that is itself a list
                is summed into a single scalar.

            |\n
            |\n
            |\n
            |\n
            |\n

            Args:

                + fitness   = ( list ) List containing exactly ONE fitness
                    value (scalar or list of scalars)
                    ~ Required

            Raises:

                + Exception = if no fitness list is passed, or the list does
                    not hold exactly one entry
        """

        #   STEP 0: Local variables
        #   STEP 1: Setup - Local variables

        #   STEP 2: Check if fitness passed
        if ("fitness" not in kwargs):
            #   STEP 3: Error handling
            raise Exception(
                "An error occured in Garry.initTroFitness() -> Step 2: No fitness list passed"
            )

        #   STEP 4: Check fitness length
        if (len(kwargs["fitness"]) != 1):
            #   STEP 5: Error handling
            raise Exception(
                "An error occured in Garry.initTroFitness() -> Step 4: Invalid fitness list passed"
            )

        #   STEP 6: Check if fitness is a list
        if (type(kwargs["fitness"][0]) == list):
            #   STEP 8: Setup - Temp vars
            fTmp_Fitness = 0.0
            lFitness = kwargs["fitness"][0]

            #   STEP 9: Iterate through list
            for i in range(0, len(lFitness)):
                #   STEP 10: Sum
                fTmp_Fitness += lFitness[i]

            #   STEP 11: Set initial best fitness
            self.fBestSolution = fTmp_Fitness

        #   STEP 12: Input fitness not a list
        else:
            #   STEP 13: Set initial candidate fitness
            self.fBestSolution = kwargs["fitness"][0]

        #   STEP 14: Init tro best variable ([candidate, fitness])
        if (self.lTroBest == None):
            self.lTroBest = [None, None]

        self.lTroBest[1] = self.fBestSolution

        #   STEP 15: Set init flag (slot 1 = fitness initialized)
        self.__bTroState[1] = True

        #   STEP 16: Return
        return

    def initTroParams(self, **kwargs) -> None:
        """
            Description:

                Initializes the trust-region optimization process'
                parameters.

            |\n
            |\n
            |\n
            |\n
            |\n

            Args:

                + region    = ( int / float ) The algorithm's initial region
                    ~ Required

            Raises:

                + Exception = if no region argument is passed
        """

        #   STEP 0: Local variables
        #   STEP 1: Setup - Local variables

        #   STEP 2: Check if region passed
        if ("region" not in kwargs):
            #   STEP 3: Error handling
            raise Exception(
                "An error occured in Garry.initTroParams() -> Step 2: No region passed"
            )

        #   STEP 4: Set the initial region
        self.iTroRegion = kwargs["region"]

        #   STEP 5: Set init flag
        #   NOTE(review): this writes __bTroState[1] - the same slot set by
        #   initTroFitness() - which lets tro() consider itself initialized
        #   without any fitness ever being set.  Looks like a copy-paste
        #   slip; confirm whether a separate slot (or no flag) was intended.
        self.__bTroState[1] = True

        #   STEP 6: return
        return

    #
    #       endregion

    #
    #   endregion

    #
    #endregion

    #region Back-End

    #   region Back-End: Trust-Region-Optimization

    def __troStats(self) -> None:
        """
            Description:

                Placeholder for trust-region statistics reporting - currently
                a no-op.
        """

        #   STEP 0: Local variables
        #   STEP 1: Setup - Local variables

        #   STEP 2: ??

        return

    #
    #   endregion

    #
    #endregion


#
#endregion
# Beispiel #27
# 0
class Hermione:

    #region Init

    """
    """
    
    def __init__(self):
        """
            Description:

                Loads Hermione's configuration via Conny (from the file named
                by the en.Hermione enum value) and initializes the testing /
                output flags from the "parameters" section.
        """

        #region STEP 0: Local variables

        self.__enum = en.Hermione
        self.__cf = Conny()
        self.__cf.load(self.__enum.value)

        #endregion

        #region STEP 1: Private variables

        #   Config flag: is testing allowed for this class?
        self.__bAllowTesting = self.__cf.data["parameters"]["allow testing"]

        #endregion

        #region STEP 2: Public variables

        #   Config flag: should progress be printed to stdout?
        self.bShowOutput = self.__cf.data["parameters"]["show output"]

        #endregion

        #region STEP 3: Setup - Private variables

        #endregion

        #region STEP 4: Setup - Public variables

        #endregion

        return

    #
    #endregion

    #region Front-End

    #   region Front-End: Mapping

    def mapSurrogate(self, **kwargs) -> dict:
        """
            Description:

                Maps the passed surrogate using the passed dataset.

            |\n
            |\n
            |\n
            |\n
            |\n

            Arguments:

                + data  = ( vars ) A Data container instance containing the
                    dataset to be used for mapping
                    ~ Required

                + surrogate = ( vars ) A surrogate model to map
                    ~ Required

                + optimizer = ( enum ) The enum of the optimizer to be used
                    during the mapping process
                    ~ Default   = PSO

                + thread    = ( bool ) A flag that indicates if threading 
                    should be used to map the surrogate
                    ~ Default   = False
        """

        #   STEP 0: Local variables
        eOptimizer              = sw.PSO
        bThreading              = False

        #   STEP 1: Setup - Local variables
        
        #   region STEP 2->5: Error checking

        #   STEP 2: Check if data arg passed
        if ("data" not in kwargs):
            #   STEP 3: Error handling
            raise Exception("An error occured in Hermione.mapSurrogate() -> Step 2: No data arg passed")

        #   STEP 4: Check if surrogate arg passed
        if ("surrogate" not in kwargs):
            #   STEP 5: Error handling
            raise Exception("An error occured in Hermion.mapSurrogate() -> Step 4: No surrogate arg passed")
        
        #
        #   endregion

        #   region STEP 6->11: Setup - Local variables

        #   STEP 6: Check if threading arg passed
        if ("threading" in kwargs):
            #   STEP 7: Check threading status
            if (kwargs["threading"] == True):
                #   STEP 8: Update - Local variables
                bThreading  = True

        #   STEP 10: Check if optimizer arg passed
        if ("optimizer" in kwargs):
            #   STEP 11: Update - Local variables
            eOptimizer = kwargs["optimizer"]

        #
        #   endregion

        #   STEP 12: Check if optimizer is GA
        if (ga.isEnum(eOptimizer)):
            #   STEP 13: Check threading status
            if (bThreading):
                #   STEP 14: User output
                if (self.bShowOutput):
                    print("Hermione (map-srg) {" + Helga.time() + "} - Starting threaded surrogate mapping")

                #   STEP 15: Outsource and return
                return self.__mapSurrogate__(surrogate=kwargs["surrogate"], data=kwargs["data"], optimizer=eOptimizer)
            
            #   STEP 16: Not threaded
            else:
                #   STEP 17: User output
                if (self.bShowOutput):
                    print("Hermione (map-srg) {" + Helga.time() + "} - Starting surrogate mapping")

                #   STEP 18: Create new mapper
                spongebob   = SpongeBob()

                #   STEP 19: Outsource and return
                return spongebob.mapSurrogate(surrogate=kwargs["surrogate"], data=kwargs["data"], optimizer=eOptimizer)

        #   STEP 20: Check if optimizer is swarm
        elif (sw.isEnum(eOptimizer)):
            #   STPE 21: Check threading status
            if (bThreading):
                #   STEP 22: User output
                if (self.bShowOutput):
                    print("Hermione (map-srg) {" + Helga.time() + "} - Starting threaded surrogate mapping")
                
                #   STEP 23: Outsource and return
                return self.__mapSurrogate__(surrogate=kwargs["surrogate"], data=kwargs["data"], optimizer=eOptimizer)

            #   STEP 24: Not threaded
            else:
                #   STEP 25: User output
                if (self.bShowOutput):
                    print("Hermione (map-srg) {" + Helga.time() + "} - Starting surrogate mapping")

                #   STEP 26: Create new swarm handler
                sarah = Sarah()

                #   STEP 27: Outsource and return
                return sarah.mapSurrogate(surrogate=kwargs["surrogate"], data=kwargs["data"], optimizer=eOptimizer)
        
        #   STEP 29: Unidentified optimizer - Error handling
        print("Initial error: ", eOptimizer)
        raise Exception("An error occured in Natalie.mapSurrogate() -> Step 29: Unidentified optimizer")

    #
    #   endregion

    #   region Front-End: Training

    def trainSurrogate(self, **kwargs) -> vars:
        """
            Description:
            
                Trains the passed surrogate using thread techniques if required to
                do so. If an optimizer is specified, only that optimizer will be used.
                However, if no optimizer is specified a random optimizer will be used. 

            |\n
            |\n
            |\n
            |\n
            |\n

            Args:

                + surrogate   = ( vars ) A surrogate class instance
                    ~ Required

                + data        = ( vars ) Data container
                    ~ Required

                + password    = ( int ) Surrogate pass
                    ~ Required

                + optimizer   = ( enum ) The enum of the optimizer to be used
                    ~ Default = PSO

                + threading   = ( bool ) Multi-treading flag
                    ~ Default = False

            |\n

            Returns:

                surrogate   = ( vars ) The trained surrogate

        """

        #   STEP 0: Local variables
        eOptimizer              = sw.PSO
        bThreading              = False

        #   STEP 1: Setup - Local variables

        #   region STEP 2->7: Error checking

        #   STEP 2: Check that a surrogate was passed
        if ("surrogate" not in kwargs):
            #   STEP 3: Error handling
            raise Exception("An error occured in Hermione.trainSurrogate() -> Step 2: No surrogate passed")

        #   STEP 4: Check that the data container was passed
        if ("data" not in kwargs):
            #   STEP 5: Error handling
            raise Exception("An error occured in Hermione.trainSurrogate() -> Step 4: Data container not passed")

        #   STEP 6: Check that the surrogate password was passed
        if ("password" not in kwargs):
            #   STEP 7: Error handling
            raise Exception("An error occured in Hermione.trainSurrogate() -> Step 4: Password associated with surrogate not passed")

        #
        #   endregion
        
        #   region STEP 8->12: Setup - Local variables

        #   STEP 8: Check if threading was specified
        if ("threading" in kwargs):
            #   STEP 9: Check if threading enabled
            if (kwargs["threading"] == True):
                #   STEP 10: Update - Local variables
                bThreading  = True

        #   STEP 11: Check if optimizer was specified
        if ("optimizer" in kwargs):
            #   STEP 12: Set optimizer
            eOptimizer = kwargs["optimizer"]

        #
        #   endregion

        #   STEP 13: Check if optimizer is GA
        if (ga.isEnum(eOptimizer)):
            #   STEP 14: Check - Threading status
            if (bThreading):
                #   STEP 15: User output
                if (self.bShowOutput):
                    print("Hermione (train-srg) {" + Helga.time() + "} - Starting threaded surrogate training")

                #   STEP 16: Outsource and return
                return self.__trainSurrogate__(surrogate=kwargs["surrogate"], data=kwargs["data"], password=kwargs["password"], optimizer=eOptimizer)

            #   STEP 17: Not threaded
            else:
                #   STEP 18: User output
                if (self.bShowOutput):
                    print("Hermione (train-srg) {" + Helga.time() + "} - Starting surrogate training")

                #   STEP 19: Create new optimizer
                sb      = SpongeBob()

                #   STEP 20: Outsource and return
                return sb.trainSurrogate(surrogate=kwargs["surrogate"], data=kwargs["data"], password=kwargs["password"], optimizer=eOptimizer)

        #   STEP 21: Check if optimizer is swarm
        if (sw.isEnum(eOptimizer)):
            #   STEP 22: Check - Threading status
            if (bThreading):
                #   STEP 23: User output
                if (self.bShowOutput):
                    print("Hermione (train-srg) {" + Helga.time() + "} - Starting threaded surrogate training")

                #   STEP 24: Outsouce and return
                return self.__trainSurrogate__(surrogate=kwargs["surrogate"], data=kwargs["data"], password=kwargs["password"], optimizer=eOptimizer)

            #   STEP 25: Not threaded
            else:
                #   STEP 26: User output
                if (self.bShowOutput):
                    print("Hermione (train-srg) {" + Helga.time() + "} - Starting surrogate training")

                #   STEP 27: Create new optimizer
                sarah       = Sarah()

                #   STEP 28: Outsource and return
                return sarah.trainSurrogate(surrogate=kwargs["surrogate"], data=kwargs["data"], password=kwargs["password"], optimizer=eOptimizer)

        #   STEP 29: Unidentified optimizer - Error handling
        print("Initial error: ", eOptimizer)
        raise Exception("An error occured in Natalie.trainSurrogate() -> Step 29: Unidentified optimizer")

    #
    #   endregion

    #
    #endregion

    #region Back-End

    #   region Back-End: Gets

    def __threadUI__(self, _eGlobal_Exit, _eGlobal, _eUI, _qReturn) -> None:
        """
            Description:

                Run as Thread(). Polls a helper thread that performs blocking
                user input and forwards anything captured to the caller via
                the passed mp.Queue(), without this loop ever blocking.

            |\n
            |\n
            |\n
            |\n
            |\n

            Parameters:

                + _eGlobal_Exit  = ( mp.Event() ) Event signalling global exit
                    for threads and processes

                + _eGlobal  = ( mp.Event() ) Event signalling global action

                + _eUI      = ( mp.Event() ) Event set when input has been
                    pushed onto the output mp.Queue

                + _qReturn  = ( mp.Queue() ) The queue onto which user input
                    should be returned
        """

        #   STEP 0: Local variables
        qInput              = mp.Queue()
        eInput              = mp.Event()

        #   STEP 1: Setup - Local variables
        eInput.clear()

        #   STEP 2: Setup - Threaded blocking ui helper
        tInput              = tr.Thread(target=self.__threadUI_Blocking__, args=(eInput, qInput, ) )
        tInput.daemon       = True
        tInput.start()

        #   STEP 3: Poll until global exit is signalled
        while (not _eGlobal_Exit.is_set()):
            #   STEP 4: Check for input captured by the blocking helper
            if (eInput.is_set()):
                #   STEP 5: Clear the helper's event and forward the input
                eInput.clear()

                _qReturn.put( qInput.get() )

                #   STEP 6: Signal both the UI event and the global event
                _eUI.set()
                _eGlobal.set()

            #   STEP 7: Sleep before the next poll
            t.sleep(0.35)

        #   STEP 8: Return
        return

    def __threadUI_Blocking__(self, _eUI, _qReturn) -> None:
        """
            Description:

                Run as Thread(). Gets blocking input and returns via the passed
                mp.Queue(). Waits for the consumer to clear _eUI before
                reading the next line, so a pending item is never overtaken.

            |\n
            |\n
            |\n
            |\n
            |\n

            Parameters:

                + _eUI       = ( mp.Event() ) Event signalling input pushed to
                    the output mp.Queue

                + _qReturn  = ( mp.Queue() ) The queue onto which user input
                    should be returned
        """

        #   STEP 0: Local variables

        #   STEP 1: Setup - Local variables

        #   STEP 2: Loop to infinity
        while (True):
            #   STEP 3: Wait until the previous input has been consumed
            #       NOTE: Event.wait() returns immediately while the event is
            #       still SET, so the previous `if _eUI.is_set(): _eUI.wait()`
            #       guard never actually waited; poll until the consumer
            #       clears the event instead
            while (_eUI.is_set()):
                t.sleep(0.05)

            #   STEP 4: Get input (blocks on stdin)
            sTmp_Input  = input()

            #   STEP 5: Push input to queue
            _qReturn.put([sTmp_Input])

            #   STEP 6: Set event to notify the consumer
            _eUI.set()

        #   STEP 7: Return (unreachable - daemon thread loops forever)
        return

    #
    #   endregion

    #   region Back-End: Sets
    
    #
    #   endregion

    #   region Back-End: Mapping

    def __mapSurrogate__(self, **kwargs) -> dict:
        """
            Description:
            
                Maps the passed surrogate using process-based parallelism. Up
                to iThread (8) mapping runs are performed, at most four
                concurrently; each worker runs __threadMap__ on a deep copy of
                the surrogate and data. If an optimizer is specified, that
                optimizer will be more likely to be used during the threaded
                mapping process (each worker may still randomly substitute
                another active optimizer - see __threadMap__). The result with
                the lowest fitness is returned.

            |\n
            |\n
            |\n
            |\n
            |\n

            Args:

                + surrogate   = ( vars ) A surrogate class instance
                    ~ Required

                + data        = ( vars ) Data container
                    ~ Required
                    
                + optimizer   = ( enum ) The enum of the optimizer to be used
                    ~ Required

            |\n

            Returns:

                + dOut  = ( dict ) The result dictionary of the fittest
                    mapping run, as produced by __threadMap__; contains at
                    least a "fitness" key (and presumably "result" - see
                    __threadMap__'s docstring)

            |\n

            ToDo:

                + Redo step numbering starting @ 6/7
                + threads can be list instead of tThread0, ...
        """
        
        #   STEP 0: Local variables
        eGlobal                 = None      # mp.Event - set when UI input arrives
        eGlobal_Exit            = None      # mp.Event - global exit signal for workers

        eUI_Event               = None      # mp.Event - UI thread signals new input
        qUI_Queue               = None      # mp.Queue - UI thread returns input here
        tUI_Thread              = None      # tr.Thread - non-blocking UI poller

        lUO_Lock                = None      # mp.RLock - serialises worker console output

        lThread_Data            = []        # per-slot [process, event, queue], 4 slots
        lThread_Results         = []        # result dicts collected from finished workers
        iThread                 = 8         # total number of mapping runs to perform
        iThread_ID              = 0         # next run id; also counts runs launched

        fFittest                = np.inf    # best (lowest) fitness seen so far
        iFittest                = 0         # index into lThread_Results of the best run

        #   STEP 1: Setup - Local variables

        #   region STEP 2->7: Error checking

        #   STEP 2: Check if data arg passed
        if ("data" not in kwargs):
            #   STEP 3: Error handling
            raise Exception("An error occured in Hermione.__mapSurrogate__() -> Step 2: No data arg passed")

        #   STEP 4: Check if surrogate arg passed
        if ("surrogate" not in kwargs):
            #   STEP 5: Error handling
            raise Exception("An error occured in Hermione.__mapSurrogate__() -> Step 4: No surrogate arg passed")
        
        #   STEP 6: Check if optimizer arg passed
        if ("optimizer" not in kwargs):
            #   STEP 7: Error handling
            raise Exception("An error occured in Hermione.__mapSurrogate__(): No optimizer arg passed")
        
        #
        #   endregion
        
        #   region STEP 8->10: Setup - Local variables

        #   STEP 8: Setup - Global variables
        eGlobal                 = mp.Event()
        eGlobal.clear()

        eGlobal_Exit            = mp.Event()
        eGlobal_Exit.clear()

        #   STEP 9: Setup - UI thread
        eUI_Event               = mp.Event()
        qUI_Queue               = mp.Queue()

        tUI_Thread              = tr.Thread(target=self.__threadUI__, args=(eGlobal_Exit, eGlobal, eUI_Event, qUI_Queue, ))
        tUI_Thread.daemon       = True
        #tUI_Thread.start()

        lUO_Lock                = mp.RLock()

        #   STEP 10: Setup - Thread data list (4 concurrent worker slots)
        for _ in range(0, 4):
            lThread_Data.append(None)

        #
        #   endregion

        #   region 11->55: Mapping process
        
        #   STEP 11: Loop until all runs complete (or UI requests exit)
        while (True):

            #   region STEP 12->31: Thread creation

            #   STEP 12: Loop through worker slots
            for i in range(0, len(lThread_Data)):
                #   STEP 13: Check if slot is empty (first pass only)
                if (lThread_Data[i] == None):
                    #   STEP 14: Create a new completion event
                    eTmp_Event          = mp.Event()
                    eTmp_Event.clear()

                    #   STEP 15: Create tmp thread dictionary
                    #       NOTE: surrogate/data are deep-copied so each
                    #       worker process mutates its own instance
                    dTmp_Thread = {
                        "surrogate":    cp.deepcopy(kwargs["surrogate"]),
                        "data":         cp.deepcopy(kwargs["data"]),
                        "optimizer":    kwargs["optimizer"],

                        "id":           iThread_ID,
                        "thread":       i
                    }

                    #   STEP 16: Create a new queue (carries args in, results out)
                    qTmp_Queue          = mp.Queue()
                    qTmp_Queue.put([dTmp_Thread])

                    #   STEP 17: Create new process
                    tTmp_Thread         = mp.Process(target=self.__threadMap__, args=(eGlobal_Exit, eTmp_Event, qTmp_Queue, lUO_Lock, ) )
                    tTmp_Thread.daemon  = True
                    tTmp_Thread.start()

                    #   STEP 18: Set thread var
                    lThread_Data[i] = [tTmp_Thread, eTmp_Event, qTmp_Queue]

                    #   STEP 19: Increment thread id
                    iThread_ID      += 1

                #   STEP 20: Event not None (slot already occupied)
                else:
                    #   STEP 21: Check if worker has finished
                    if (lThread_Data[i][1].is_set() == True):
                        #   STEP 22: Clear event
                        lThread_Data[i][1].clear()

                        #   STEP 23: Append result dict from the worker's queue
                        lThread_Results.append( lThread_Data[i][2].get()[0] )

                        #   STEP 24: Check if more runs still need launching
                        if (iThread_ID < iThread):
                            #   STEP 25: Create tmp thread dictionary
                            dTmp_Thread = {
                                "surrogate":    cp.deepcopy(kwargs["surrogate"]),
                                "data":         cp.deepcopy(kwargs["data"]),
                                "optimizer":    kwargs["optimizer"],

                                "id":           iThread_ID,
                                "thread":       i
                            }

                            #   STEP 26: Update input queue
                            lThread_Data[i][2].put( [dTmp_Thread] )

                            #   STEP 27: Create new process, reusing the slot's event and queue
                            tTmp_Thread         = mp.Process(target=self.__threadMap__, args=(eGlobal_Exit, lThread_Data[i][1], lThread_Data[i][2], lUO_Lock ) )
                            tTmp_Thread.daemon  = True
                            tTmp_Thread.start()

                            #   STEP 28: Set thread var
                            lThread_Data[i][0]  = tTmp_Thread

                            #   STEP 29: Set training flag and increment thread id
                            iThread_ID      += 1

            #   STEP 30: Check if all iThread results have been collected
            if ( len( lThread_Results ) == iThread ):
                #   STEP 31: Exit loop
                break

            #
            #   endregion

            #   region STEP 32->41: UI support
            #       NOTE(review): tUI_Thread.start() is commented out above,
            #       so eUI_Event is never set and this exit path is currently
            #       unreachable - confirm before relying on "exit" handling

            #   STEP 32: Check if ui event occured
            if (eUI_Event.is_set() == True):
                #   STEP 33: Clear event
                eUI_Event.clear()

                #   STEP 34: Check if ui results == "exit"
                if (qUI_Queue.get()[0] == "exit"):
                    #   STEP 35: Set thread joining event
                    eGlobal_Exit.set()
                    #tUI_Thread.join()

                    #   STEP 36: Loop through mapping threads
                    #       NOTE(review): assumes every slot has been filled;
                    #       a still-None slot would raise TypeError here
                    for i in range(0, len(lThread_Data)):
                        #   STEP 37: Check if thread is still mapping
                        if (lThread_Data[i][0].is_alive() == True):
                            #   STEP 38: Find thread and join
                            lThread_Data[i][0].join()

                            #   STEP 39: Save results
                            lThread_Results.append( lThread_Data[i][2].get()[0] )

                    #   STEP 40: Clear thread join event
                    eGlobal_Exit.clear()

                    #   STEP 41: Exit loop
                    break

            #
            #   endregion

        #
        #   endregion

        #   STEP 42: Iterate through results
        for i in range(0, len(lThread_Results)):
            #   STEP 43: Check if fitness less than current best (lower is better)
            if (lThread_Results[i]["fitness"] < fFittest):
                #   STEP 44: Update - Local variables
                fFittest    = lThread_Results[i]["fitness"]
                iFittest    = i
        
        #   STEP 45: Return the fittest result dictionary
        return lThread_Results[iFittest]

    def __threadMap__(self, _eExit, _eTr, _qTr, _lUO) -> None:
        """
            Description:

                Run as Process(). Pops its argument dictionary from _qTr and
                outsources the mapping of the surrogate to the appropriate
                optimization handler (SpongeBob for GA optimizers, Sarah for
                swarm optimizers). With probability 0.3 a random active
                optimizer is chosen instead of the provided one. The result
                dictionary is pushed back onto _qTr and _eTr is set on
                completion.

            |\n
            |\n
            |\n
            |\n
            |\n

            Parameters:

                + _eExit    = ( mp.Event() ) Event signalling global exit
                    for threads and processes
                    ~ NOTE: currently unused here; accepted to keep the
                        worker interface uniform with __threadTrain__

                + _eTr      = ( mp.Event() ) Event signalling process
                    completion

                + _qTr      = ( mp.Queue() ) The queue from which the argument
                    dictionary is popped and onto which the process results
                    are returned

                + _lUO      = ( mp.RLock() ) The lock for common user output

            |\n

            Returns:

                + dOut  = ( dict ) Returned via _qTr
                    ~ "result"  = ( list ) The list of surrogate inputs that
                        yielded the best results

                    ~ "fitness" = ( float ) The fitness of the best results
        """
        
        #   STEP 0: Local variables
        dArgs                   = _qTr.get()[0]     # worker arguments pushed by the parent
        dResults                = None

        iThread_ID              = Helga.ticks()     # unique id for log output
        iThread_AppID           = dArgs["thread"]   # parent's slot index

        iSwarms_Active          = 0
        iGA_Active              = 0

        iOptimizers_Active      = 0

        #   region STEP 1->15: Map using provided optimizer

        #   STEP 1: Check if not random optimizer (70% chance)
        if (rn.uniform(0.0, 1.0) > 0.3):
            #   STEP 2: Check if optimizer is GA
            if (ga.isEnum(dArgs["optimizer"])):
                #   STEP 3: User output
                if (self.bShowOutput):
                    #   STEP 4: Get lock
                    _lUO.acquire()

                    #   STEP 5: Populate strings list for threaded output
                    print("\t- Assigning SpongeBob to mapping")
                    print("\t- Optimizer: " + str(dArgs["optimizer"]))
                    print("\t- Thread ID: " + str(iThread_ID))
                    print("\t- Application Thread ID: " + str(iThread_AppID))
                    print("\t- Time: " + Helga.time() + "\n")

                    #   STEP 6: Release lock
                    _lUO.release()

                #   STEP 7: Create new mapper
                sb          = SpongeBob()

                #   STEP 8: Outsource mapping
                dResults    = sb.mapSurrogate(surrogate=dArgs["surrogate"], data=dArgs["data"], optimizer=dArgs["optimizer"])

            #   STEP 9: Check if swarm
            #       NOTE: elif (not if) - consistent with __threadTrain__ and
            #       guarantees the GA result can never be overwritten by a
            #       second mapping run
            elif (sw.isEnum(dArgs["optimizer"])):
                #   STEP 10: User output
                if (self.bShowOutput):
                    #   STEP 11: Get lock
                    _lUO.acquire()

                    #   STEP 12: Populate strings list for threaded output
                    print("\t- Assigning Sarah to mapping")
                    print("\t- Optimizer: " + str(dArgs["optimizer"]))
                    print("\t- Thread ID: " + str(iThread_ID))
                    print("\t- Application Thread ID: " + str(iThread_AppID))
                    print("\t- Time: " + Helga.time() + "\n")

                    #   STEP 13: Release lock
                    _lUO.release()

                #   STEP 14: Create new mapper
                sh          = Sarah()

                #   STEP 15: Outsource mapping
                dResults    = sh.mapSurrogate(surrogate=dArgs["surrogate"], data=dArgs["data"], optimizer=dArgs["optimizer"])

            #   NOTE(review): if the provided optimizer is neither a GA nor a
            #   swarm enum, dResults stays None and the output/put below will
            #   raise - upstream validation is assumed to prevent this

        #
        #   endregion

        #   region STEP 16->34: Map using random optimizer

        #   STEP 16: Using random optimizer for mapping (30% chance)
        else:
            #   STEP 17: Update - Local variables
            iSwarms_Active      = sw.getNumActiveSwarms()
            iGA_Active          = ga.getNumActiveGAs()

            iOptimizers_Active  = iSwarms_Active + iGA_Active

            #   STEP 18: Choose a random optimizer (swarms first, then GAs)
            iTmp_Optimizer  = rn.randint(0, iOptimizers_Active - 1)

            #   STEP 19: Check if swarm:
            if (iTmp_Optimizer < iSwarms_Active):
                #   STEP 20: Get optimizer enum
                eTmp_Optimizer  = sw.getActiveSwarms()[iTmp_Optimizer]

                #   STEP 21: User output
                if (self.bShowOutput):
                    #   STEP 22: Acquire lock
                    _lUO.acquire()

                    #   STEP 23: Populate output strings
                    print("\t- Assigning Sarah to training")
                    print("\t- Optimizer: " + str(eTmp_Optimizer))
                    print("\t- Thread ID: " + str(iThread_ID))
                    print("\t- Application Thread ID: " + str(iThread_AppID))
                    print("\t- Time: " + Helga.time() + "\n")

                    #   STEP 24: Release lock
                    _lUO.release()

                #   STEP 25: Create new mapper
                sh          = Sarah()

                #   STEP 26: Outsource
                dResults    = sh.mapSurrogate(surrogate=dArgs["surrogate"], data=dArgs["data"], optimizer=eTmp_Optimizer)

            #   STEP 27: Then ga
            else:
                #   STEP 28: Get optimizer enum (offset past the swarm range)
                eTmp_Optimizer  = ga.getActiveGAs()[iTmp_Optimizer - iSwarms_Active]

                #   STEP 29: User output
                if (self.bShowOutput):
                    #   STEP 30: Acquire lock
                    _lUO.acquire()

                    #   STEP 31: Populate output strings
                    print("\t- Assigning SpongeBob to training")
                    print("\t- Optimizer: " + str(eTmp_Optimizer))
                    print("\t- Thread ID: " + str(iThread_ID))
                    print("\t- Application Thread ID: " + str(iThread_AppID))
                    print("\t- Time: " + Helga.time() + "\n")

                    #   STEP 32: Release lock
                    _lUO.release()

                #   STEP 33: Create new mapper
                sb          = SpongeBob()

                #   STEP 34: Outsource mapping
                dResults    = sb.mapSurrogate(surrogate=dArgs["surrogate"], data=dArgs["data"], optimizer=eTmp_Optimizer)

        #
        #   endregion
        
        #   Step 35: User output
        if (self.bShowOutput):
            #   STEP 36: Get lock
            _lUO.acquire()

            #   STEP 37: Create output strings
            print("\t\t\t\t\t- Thread: " + str(iThread_AppID) +  " - <" + str( round( 100.0 * dResults["fitness"], 3 ) ) + ">\n")

            #   STEP 38: Release lock
            _lUO.release()
        
        #   STEP 39: Set results
        _qTr.put([dResults])
        
        #   STEP 40: Set completion event so the parent collects the result
        _eTr.set()

        #   STEP 41: Return
        return

    #
    #   endregion

    #   region Back-End: Training

    def __trainSurrogate__(self, **kwargs) -> dict:
        """
            Description:
            
                Trains the passed surrogate using process-based parallelism.
                Up to iThread (8) training runs are performed, at most four
                concurrently; each worker runs __threadTrain__ on a deep copy
                of the surrogate and data. If an optimizer is specified, that
                optimizer will be more likely to be used during the threaded
                training process (each worker may still randomly substitute
                another active optimizer - see __threadTrain__). The result
                with the lowest fitness is returned.

            |\n
            |\n
            |\n
            |\n
            |\n

            Args:

                + surrogate   = ( vars ) A surrogate class instance
                    ~ Required

                + data        = ( vars ) Data container
                    ~ Required

                + password    = ( int ) Surrogate password
                    ~ Required

                + optimizer   = ( enum ) The enum of the optimizer to be used
                    ~ Default = { Random }

            |\n

            Returns:

                + dOut  = ( dict ) The result dictionary of the fittest
                    training run, as produced by __threadTrain__
                    ~ "surrogate" = ( vars ) The trained surrogate
                    ~ "fitness"   = ( float ) The fitness of the trained
                        surrogate
        """
        
        #   STEP 0: Local variables
        eGlobal                 = None      # mp.Event - set when UI input arrives
        eGlobal_Exit            = None      # mp.Event - global exit signal for workers

        eUI_Event               = None      # mp.Event - UI thread signals new input
        qUI_Queue               = None      # mp.Queue - UI thread returns input here
        tUI_Thread              = None      # tr.Thread - non-blocking UI poller

        lUO_Lock                = None      # mp.RLock - serialises worker console output

        lThread_Data            = []        # per-slot [process, event, queue], 4 slots
        lThread_Results         = []        # result dicts collected from finished workers
        iThread                 = 8         # total number of training runs to perform
        iThread_ID              = 0         # next run id; also counts runs launched

        fFittest                = np.inf    # best (lowest) fitness seen so far
        iFittest                = 0         # index into lThread_Results of the best run

        #   STEP 1: Setup - Local variables

        #   region STEP 2->9: Error checking

        #   STEP 2: Check if data arg passed
        if ("data" not in kwargs):
            #   STEP 3: Error handling
            raise Exception("An error occured in Hermione.__trainSurrogate__() -> Step 2: No data arg passed")

        #   STEP 4: Check if surrogate arg passed
        if ("surrogate" not in kwargs):
            #   STEP 5: Error handling
            raise Exception("An error occured in Hermione.__trainSurrogate__() -> Step 4: No surrogate arg passed")
        
        #   STEP 6: Check if optimizer arg passed
        if ("optimizer" not in kwargs):
            #   STEP 7: Error handling
            raise Exception("An error occured in Hermione.__trainSurrogate__() -> Step 6: No optimizer arg passed")
        
        #   STEP 8: Check if password arg passed
        if ("password" not in kwargs):
            #   STEP 9: Error handling
            raise Exception("An error occured in Hermione.__trainSurrogate__() -> Step 8: No password arg passed")
        
        #
        #   endregion

        #   region STEP 10->12: Setup - Global variables

        #   STEP 10: Setup - Global variables
        eGlobal                 = mp.Event()
        eGlobal.clear()

        eGlobal_Exit            = mp.Event()
        eGlobal_Exit.clear()

        #   STEP 11: Setup - UI Thread
        eUI_Event               = mp.Event()
        qUI_Queue               = mp.Queue()

        tUI_Thread              = tr.Thread(target=self.__threadUI__, args=(eGlobal_Exit, eGlobal, eUI_Event, qUI_Queue, ))
        tUI_Thread.daemon       = True
        #tUI_Thread.start()

        lUO_Lock                = mp.RLock()

        #   STEP 12: Setup - Thread data list (4 concurrent worker slots)
        for _ in range(0, 4):
            lThread_Data.append(None)

        #
        #   endregion

        #   STEP 13: User output
        if (self.bShowOutput):
            #   STEP 14: Acquire lock
            lUO_Lock.acquire()

            #   STEP 15: print
            print("Hermione (train-thread) {" + Helga.time() + "} - Starting threaded surrogate training.\n")

            #   STEP 16: Release
            lUO_Lock.release()

        #   region STEP 17->46: Training process

        #   STEP 17: Loop until all runs complete (or UI requests exit)
        while (True):

            #   region STEP 18->36: Thread creation

            #   STEP 18: Iterate through worker slots
            for i in range(0, len(lThread_Data)):
                #   STEP 19: Check if slot is empty (first pass only)
                if (lThread_Data[i] == None):
                    #   STEP 20: Create a new completion event
                    eTmp_Event          = mp.Event()
                    eTmp_Event.clear()

                    #   STEP 21: Create new thread dictionary
                    #       NOTE: surrogate/data are deep-copied so each
                    #       worker process mutates its own instance
                    dTmp_Thread = {
                        "surrogate":    cp.deepcopy(kwargs["surrogate"]),
                        "data":         cp.deepcopy(kwargs["data"]),
                        "optimizer":    kwargs["optimizer"],

                        "id":           iThread_ID,
                        "password":     kwargs["password"],
                        "thread":       i
                    }

                    #   STEP 22: Create a new queue (carries args in, results out)
                    qTmp_Queue          = mp.Queue()
                    qTmp_Queue.put([dTmp_Thread])
                    
                    #   STEP 23: Create new thread
                    tTmp_Thread         = mp.Process(target=self.__threadTrain__, args=(eGlobal_Exit, eTmp_Event, qTmp_Queue, lUO_Lock, ))
                    tTmp_Thread.daemon  = True
                    tTmp_Thread.start()
                    
                    #   STEP 24: Set thread variable
                    lThread_Data[i]     = [tTmp_Thread, eTmp_Event, qTmp_Queue]

                    #   STEP 25: Set training flag and increment thread id
                    iThread_ID          += 1

                #   STEP 26: Event not None (slot already occupied)
                else:
                    #   STEP 27: Check if worker has finished
                    if (lThread_Data[i][1].is_set() == True):
                        #   STEP 28: Clear event
                        lThread_Data[i][1].clear()

                        #   STEP 29: Append result dict from the worker's queue
                        lThread_Results.append( lThread_Data[i][2].get()[0] )

                        #   STEP 30: Check if more runs still need launching
                        if (iThread_ID < iThread):
                            #   STEP 31: Create new thread dictionary
                            dTmp_Thread = {
                                "surrogate":    cp.deepcopy(kwargs["surrogate"]),
                                "data":         cp.deepcopy(kwargs["data"]),
                                "optimizer":    kwargs["optimizer"],

                                "id":           iThread_ID,
                                "password":     kwargs["password"],
                                "thread":       i
                            }

                            lThread_Data[i][2].put( [dTmp_Thread] )

                            #   STEP 32: Create a new thread, reusing the slot's event and queue
                            tTmp_Thread         = mp.Process(target=self.__threadTrain__, args=(eGlobal_Exit, lThread_Data[i][1], lThread_Data[i][2], lUO_Lock, ))
                            tTmp_Thread.daemon  = True
                            tTmp_Thread.start()

                            #   STEP 33: Set thread var
                            lThread_Data[i][0]  = tTmp_Thread
                            
                            #   STEP 34: Increment thread id
                            iThread_ID          += 1

            #   STEP 35: Check if all iThread results have been collected
            if ( len( lThread_Results ) == iThread):
                #   STEP 36: Exit loop
                break

            #
            #   endregion

            #   region STEP 37->46: Ui support
            #       NOTE(review): tUI_Thread.start() is commented out above,
            #       so eUI_Event is never set and this exit path is currently
            #       unreachable - confirm before relying on "exit" handling

            #   STEP 37: Check if ui thread is set
            if (eUI_Event.is_set() == True):
                #   STEP 38: Clear event
                eUI_Event.clear()

                #   STEP 39: Check if ui output == "exit"
                if (qUI_Queue.get()[0] == "exit"):
                    #   STEP 40: Set thread joining event
                    eGlobal_Exit.set()
                    #tUI_Thread.join()

                    #   STEP 41: Loop through training threads
                    #       NOTE(review): assumes every slot has been filled;
                    #       a still-None slot would raise TypeError here
                    for i in range(0, len( lThread_Data )):
                        #   STEP 42: Check if thread is still training
                        if (lThread_Data[i][0].is_alive() == True):
                            #   STEP 43: Join thread
                            lThread_Data[i][0].join()

                            #   STEP 44: Save results
                            lThread_Results.append( lThread_Data[i][2].get()[0] )

                    #   STEP 45: Reset thread joining event
                    eGlobal_Exit.clear()

                    #   STEP 46: Exit loop
                    break

            #
            #   endregion

        #
        #   endregion

        #   STEP 47: Iterate through results
        for i in range(0, len( lThread_Results )):
            #   STEP 48: Check if fitter than current best (lower is better)
            if (lThread_Results[i]["fitness"] < fFittest):
                #   STEP 49: Set new best candidate
                fFittest    = lThread_Results[i]["fitness"]
                iFittest    = i

        #   STEP 50: Return the fittest result dictionary
        return lThread_Results[iFittest]

    def __announceTrainer__(self, _lUO, _sHandler, _eOptimizer, _iThread_ID, _iThread_AppID) -> None:
        """
            Description:

                Prints the standard "assigning <handler> to training" user
                output block under the shared user-output lock. Does nothing
                when self.bShowOutput is False.

            |\n

            Parameters:

                + _lUO          = ( mp.RLock() ) The lock for common user output

                + _sHandler     = ( str ) Name of the optimization handler

                + _eOptimizer   = ( enum ) The optimizer being assigned

                + _iThread_ID   = ( int ) This thread's ID

                + _iThread_AppID    = ( int ) The application-level thread ID
        """

        #   STEP 0: Check output flag
        if (self.bShowOutput):
            #   STEP 1: Acquire lock
            _lUO.acquire()

            #   STEP 2: Print output
            print("\t- Assigning " + _sHandler + " to training")
            print("\t- Optimizer: " + str(_eOptimizer))
            print("\t- Thread ID: " + str(_iThread_ID))
            print("\t- Application Thread ID: " + str(_iThread_AppID))
            print("\t- Time: " + Helga.time() + "\n")

            #   STEP 3: Release lock
            _lUO.release()

        #   STEP 4: Return
        return

    def __threadTrain__(self, _eExit, _eTr, _qTr, _lUO) -> None:
        """
            Description:

                This function outsources the training of the surrogate to the
                appropriate optimization handler after finding the optimizer
                to use. With probability 0.3 the provided optimizer is ignored
                and a random active optimizer is chosen instead.

            |\n
            |\n
            |\n
            |\n
            |\n

            Parameters:

                + _eExit    = ( mp.Event() ) Event signalling global exit
                    for threads and processes
                    # NOTE(review): currently unused in this body; kept for
                    # interface compatibility with the thread spawner

                + _eTr      = ( mp.Event() ) Event signalling process
                    completion

                + _qTr      = ( mp.Queue() ) The queue onto which the process
                    results should be returned

                + _lUO      = ( mp.RLock() ) The lock for common user output

            |\n

            Returns (via _qTr):

                + dict        = ( dict )
                    ~ surrogate   = ( vars ) The trained surrogate
                    ~ fitness     = ( float ) The overall fitness of the trained surrogate
                    ~ accuracy, algorithm, iterations, inverse accuracy, scalar

            Raises:

                + RuntimeError  = If the provided optimizer is neither a GA
                    nor a swarm enum (previously an opaque AttributeError on
                    a None result)
        """

        #   STEP 0: Local variables
        dArgs                   = _qTr.get()[0]
        dResults                = None

        iThread_ID              = Helga.ticks()
        iThread_AppID           = dArgs["thread"]

        #   region STEP 1->8: Train using provided optimizer

        #   STEP 1: Check if not random optimizer (70% chance)
        if (rn.uniform(0.0, 1.0) > 0.3):
            #   STEP 2: Check if optimizer is GA
            if (ga.isEnum(dArgs["optimizer"])):
                #   STEP 3: User output
                self.__announceTrainer__(_lUO, "SpongeBob", dArgs["optimizer"], iThread_ID, iThread_AppID)

                #   STEP 4: Create new optimizer
                sb = SpongeBob()

                #   STEP 5: Outsource training
                dResults = sb.trainSurrogate(surrogate=dArgs["surrogate"], data=dArgs["data"], password=dArgs["password"], optimizer=dArgs["optimizer"])

            #   STEP 6: Check if swarm
            elif (sw.isEnum( dArgs["optimizer"] )):
                #   STEP 7: User output
                self.__announceTrainer__(_lUO, "Sarah", dArgs["optimizer"], iThread_ID, iThread_AppID)

                #   STEP 8: Create new optimizer
                sarah = Sarah()

                #   STEP 9: Outsource training
                dResults = sarah.trainSurrogate(surrogate=dArgs["surrogate"], data=dArgs["data"], password=dArgs["password"], optimizer=dArgs["optimizer"])

        #
        #   endregion

        #   region STEP 10->19: Random training

        #   STEP 10: Use random
        else:
            #   STEP 11: Update - Local variables
            iSwarms_Active      = sw.getNumActiveSwarms()
            iGA_Active          = ga.getNumActiveGAs()

            iOptimizers_Active  = iSwarms_Active + iGA_Active

            #   STEP 12: Random a handler
            iTmp_Optimizer      = rn.randint(0, iOptimizers_Active - 1)

            #   STEP 13: If swarm (indices [0, iSwarms_Active) map to swarms)
            if (iTmp_Optimizer < iSwarms_Active):
                #   STEP 14: Get new swarm enum
                eTmp_Optimizer  = sw.getActiveSwarms()[iTmp_Optimizer]

                #   STEP 15: User output
                self.__announceTrainer__(_lUO, "Sarah", eTmp_Optimizer, iThread_ID, iThread_AppID)

                #   STEP 16: Create new optimizer
                sarah       = Sarah()

                #   STEP 17: Outsource training
                dResults    = sarah.trainSurrogate(surrogate=dArgs["surrogate"], data=dArgs["data"], password=dArgs["password"], optimizer=eTmp_Optimizer)

            #   STEP 18: Then ga
            else:
                #   STEP 19: Get new ga enum (offset past the swarm indices)
                eTmp_Optimizer = ga.getActiveGAs()[iTmp_Optimizer - iSwarms_Active]

                #   STEP 20: User output
                self.__announceTrainer__(_lUO, "SpongeBob", eTmp_Optimizer, iThread_ID, iThread_AppID)

                #   STEP 21: Create new optimizer
                sb          = SpongeBob()

                #   STEP 22: Outsource training
                dResults    = sb.trainSurrogate(surrogate=dArgs["surrogate"], data=dArgs["data"], password=dArgs["password"], optimizer=eTmp_Optimizer)

        #
        #   endregion

        #   STEP 23: Guard - unknown optimizer leaves dResults unset
        if (dResults == None):
            raise RuntimeError("Sandy.__threadTrain__: unknown optimizer - " + str(dArgs["optimizer"]))

        #   STEP 24: Get surrogate fitness, weighted by the inverse accuracy
        fTmpFitness = dResults["surrogate"].getAFitness(data=dArgs["data"])
        fTmpFitness = fTmpFitness * dResults["inverse accuracy"]

        #   STEP 25: User output
        if (self.bShowOutput):
            #   STEP 26: Get lock
            _lUO.acquire()

            #   STEP 27: Print output
            print("\t\t\t\t\t- Thread: " + str(iThread_AppID) +  " - <" + str(dResults["accuracy"]) + "  :  " + str(round(fTmpFitness, 2)) + ">")
            print("\t\t\t\t\t- Time: " + Helga.time() + "\n")

            #   STEP 28: Release lock
            _lUO.release()

        #   STEP 29: Populate output dictionary
        dOut = {
            "accuracy":     dResults["accuracy"],
            "algorithm":    dResults["algorithm"],
            "fitness":      fTmpFitness,
            "iterations":   dResults["iterations"],
            "inverse accuracy": dResults["inverse accuracy"],
            "scalar":       dResults["scalar"],
            "surrogate":    dResults["surrogate"]
        }

        #   STEP 30: Set training results
        _qTr.put([dOut])

        #   STEP 31: Set training finished event
        _eTr.set()

        #   STEP 32: Return
        return

    #
    #   endregion

    #
    #endregion

#
#endregion

#region Testing

#
#endregion