# Example #1  (scraped snippet separator; score: 0)
def main():
    # Compare the enumerated scenario count against the closed-form
    # binomial value for every (r, p) pair in 1..9.
    # NOTE(review): Python 2 print statements — the trailing commas keep the
    # two values on the same line; the bare print ends the line.
    for r in range(1, 10):
        for p in range(1, 10):
            print Scenario.countscenarios(r, p, 1),
            print pascal(cn(r, 1) + p - 1, p),
            print
        print
# Example #2  (scraped snippet separator; score: 0)
# File: Ret.py  Project: netrc/retpy
    def __init__(self, f, cash=0):
        """Initialize the portfolio for family *f* with a starting *cash* balance."""
        self.family = f
        self.cash = cash
        self.ritems = []
        self.S = Scenario(f)
        self.inflation = 1.03  # assumed annual inflation factor

        # Create the three Summary 'columns' up front.
        for column in ("Inc", "Exp", "Inv"):
            RSummary(column)
# Example #3  (scraped snippet separator; score: 0)
    def create_scenarios(self):
        """Build one Scenario per stored path and append it to scenariosList."""
        # Scenario ids are 1-based, matching the original pathId + 1.
        for scenario_id, path in enumerate(self.pathList, start=1):
            scenario = Scenario(scenario_id, '', [], self.control_words)

            # One step per edge: the edge label is the action and the
            # destination node's label is the resulting node.
            for edge in path:
                scenario.add_step(edge.label,
                                  self.get_node_label(edge.destinationNode))

            self.scenariosList.append(scenario)
# Example #4  (scraped snippet separator; score: 0)
    def run(self):
        """Load the YAML config, initialize pygame, then loop over scenarios forever."""
        # Config path comes from argv[1]; otherwise <script dir>/config.yaml.
        # NOTE(review): sys.argv entries are .decode()d — this targets
        # Python 2 (Python 3 str has no decode); confirm interpreter version.
        filename = sys.argv[1].decode("utf-8") if len(sys.argv) > 1 else None
        if not filename:
            filename = os.path.join(
                os.path.dirname(sys.argv[0].decode("utf-8")), "config.yaml")
        with open(filename, "r") as f:
            config = yaml.safe_load(f)

        # Directory containing the config file; assets are resolved from it.
        root_path = os.path.dirname(filename.encode("utf-8"))
        (w, h) = (config.get("screen").get("size")[0],
                  config.get("screen").get("size")[1])
        (x, y) = (w / 2, h / 2)
        Media.Music.init()
        pygame.init()
        #    pygame.display.set_mode((w, h),pygame.FULLSCREEN,32)
        pygame.display.set_mode((w, h), pygame.DOUBLEBUF, 32)

        screen = pygame.display.get_surface()
        # Window title: name of the directory holding the config file.
        pygame.display.set_caption(
            os.path.basename(os.path.dirname(filename.encode("utf-8"))))
        pygame.display.update()

        image_cache = Media.ImageCache(screen)

        # Endless game loop: show the title screen, then play the scenario.
        while True:
            scenario = Scenario.Scenario(root_path, config)
            self.title(scenario, screen, image_cache)
            self.execute_scenario(scenario, screen, image_cache)
# Example #5  (scraped snippet separator; score: 0)
def createScenarioFromSpirentFile(SVNum, CNO=38, Quantize=False):
    """Build a simulated GPS signal scenario from a Spirent motion file.

    Parameters
    ----------
    SVNum : satellite vehicle number used to index the simulator motion data.
    CNO : carrier-to-noise ratio handed to GenerateSignal.
    Quantize : when true, quantize the generated state history.

    Returns
    -------
    (T, SignalIn, stateHistory) as produced by the trajectory and signal
    generators.
    """
    Jerk = 100   # jerk magnitude handed to every motion segment
    dT = 0.001   # trajectory time step
    satelliteDict = Scenario.readSimulatorMotionFile()

    Time, losX, losY, losZ = satelliteDict[SVNum]
    Time /= 1000  # simulator timestamps are in ms — TODO confirm units

    # Line-of-sight range, then successive differences for velocity
    # (re-based to start at zero) and acceleration.
    pseudorange = np.sqrt(losX**2 + losY**2 + losZ**2)
    velocity = np.diff(pseudorange)
    velocity -= velocity[0]
    acceleration = np.diff(velocity)
    # Removed the dead trimming of pseudorange/velocity and the `T = Time[:-2]`
    # assignment from the original: those values were never read again
    # (T was immediately overwritten by GenerateTrajectory's return).

    # One motion segment per acceleration sample.
    motionList = [motion(Time[i], acceleration[i], Jerk)
                  for i in range(acceleration.size)]

    TMin = Time[0]
    TMax = Time[-1]
    initialState = [0, 0, acceleration[0]]
    T, stateHistory = GenerateTrajectory(TMin, TMax, initialState, motionList, dT)
    if Quantize:  # idiomatic truth test instead of `== True`
        stateHistory = quantizeSpirentStateHistory(T, stateHistory)
    SignalIn = GenerateSignal(T, stateHistory, CNO)
    return (T, SignalIn, stateHistory)
# Example #6  (scraped snippet separator; score: 0)
def GetConfig(path):
    """Read a project .ini file and return its settings as a dict.

    path: filesystem path of the configparser-compatible project file.

    Returns a dict with keys 'name', 'setup' (the *contents* of the setup
    file named in the config, read from info_dir), 'scenario_count' (string,
    as stored in the file) and 'scenarios' (list of Scenario objects).
    """
    cfg = configparser.ConfigParser()
    cfg.read(path)

    name = cfg.get('Project', 'name')
    setup = cfg.get('Project', 'setup')
    scenario_count = cfg.get('Project', 'scenario_count')

    # One Scenario per configured index; each pulls its own section from cfg.
    scenarios = []
    for i in range(int(scenario_count)):
        scen = Scenario.Scenario('scenario_' + str(i))
        scen.SetScenario(cfg)
        scenarios.append(scen)

    # Fixed: the original rebound `setup` to the open file handle inside the
    # `with`, shadowing the config value it had just used.
    with open(join(info_dir, setup), 'r') as setup_fp:
        setup_file = setup_fp.read()

    return {
        'name': name,
        'setup': setup_file,
        'scenario_count': scenario_count,
        'scenarios': scenarios,
    }
# Example #7  (scraped snippet separator; score: 0)
    def test_scenario_getSteps(self):
        """get_steps() must return exactly the steps the scenario was built with."""
        control_words = ControllWords.ControlWords(Output.Output())

        only_step = Step.Step(7, "abc\n\code: ghi", "def\n\code:jkl", control_words)
        scenario = Scenario.Scenario(3, "scn", [only_step], control_words)

        self.assertEqual(scenario.get_steps(), [scenario.steps[0]])
# Example #8  (scraped snippet separator; score: 0)
    def test_scenario_init(self):
        """The constructor must store id, name and steps unchanged."""
        control_words = ControllWords.ControlWords(Output.Output())

        steps = [Step.Step(7, "abc\n\code: ghi", "def\n\code:jkl", control_words)]
        scenario = Scenario.Scenario(3, "scn", steps, control_words)

        # Same three assertions as before, driven from a table.
        for attribute, expected in (("id", 3), ("name", "scn"), ("steps", steps)):
            self.assertEqual(getattr(scenario, attribute), expected)
# Example #9  (scraped snippet separator; score: 0)
    def test_scenario_addStep(self):
        """add_step() appends a step built from the given action/node text."""
        control_words = ControllWords.ControlWords(Output.Output())

        steps = [Step.Step(7, "abc\n\code: ghi", "def\n\code:jkl", control_words)]
        scenario = Scenario.Scenario(3, "scn", steps, control_words)

        scenario.add_step("act", "nod")

        appended = scenario.steps[1]
        self.assertEqual(appended.action.description, ["act"])
        self.assertEqual(appended.node.description, ["nod"])
def verify(p, c, maxproc=2, timeout=None):
    """Build and report one Test per model supported by protocol *p*.

    Model names other than Traces/MaxProc/Scen/RepScen are silently skipped,
    exactly as before.
    """
    comment = "MaxProc%i" % maxproc  # identical tag for every model

    for model in p.models:
        test = None
        if model == "Traces":
            test = Test.Test(p.name, c, p.toolname, model,
                             timeout=timeout, comment=comment)
        elif model == "MaxProc":
            test = Test.Test(p.name, c, p.toolname, model, maxproc,
                             timeout=timeout, comment=comment)
        elif model == "Scen":
            # Scenario set filled up to maxproc runs.
            sl = Scenario.ScenarioSet(p.roleCount(), maxproc, fill=True,
                                      agentcount=getAgents(c)).list
            test = Test.Test(p.name, c, p.toolname, model, sl,
                             timeout=timeout, comment=comment)
        elif model == "RepScen":
            # Single-run scenario set extended to cover the representatives.
            ss = Scenario.ScenarioSet(p.roleCount(), 1, fill=True,
                                      agentcount=getAgents(c))
            ss.cover()
            test = Test.Test(p.name, c, p.toolname, model, ss.list,
                             timeout=timeout, comment=comment)
        if test:
            Report.report(test)
# Example #11  (scraped snippet separator; score: 0)
def compareRecievers():
    """Plot tracked Doppler for several SVs at two loop bandwidths (18 / 32 Hz).

    For each satellite the Spirent-derived signal is regenerated, run through
    the receiver once per bandwidth, and samples 500-1000 of the recovered DCO
    frequency are saved as an .eps figure per SV per bandwidth.
    """
    channelDict = Scenario.readNMEAFile()
    satelliteDict = Scenario.readSimulatorMotionFile()
    startTime = 406800 #second of week

    CNO = 48    # carrier-to-noise ratio for signal generation
    PLLBW = 2   # NOTE(review): unused below — kept as in the original
    FLLBW = 10  # FLL bandwidth handed to runReceiver

    for SVNum in [1,3,6,11,16,18,19,22,31]:
        T,SignalIn,stateHistory = \
        GenSignal.createScenarioFromSpirentFile(SVNum,CNO)
        coherentIntegrationTime = 4
        # NOTE(review): unused; on Python 3 this is true division (float).
        numOutputSamples = T.size/coherentIntegrationTime

        print('Running Reciever')
        # First pass: 18 Hz bandwidth.
        Z,DcoFreqArray,LockStateValues,PhaseError = \
        runReceiver(T,SignalIn,18,FLLBW)
        PolarisFreq = DcoFreqArray[500:1000]
        plt.plot(PolarisFreq,'r')

        plt.ylabel('Doppler Shift (Hz)')
        plt.xlabel('Time (s)')
        plt.ylim(-4,4)
        plt.title('Bn=18Hz')
        plt.savefig(str(SVNum)+'18Polaris.eps',format='eps', dpi=1000)
        plt.close()


        # Second pass: 32 Hz bandwidth, same plot layout.
        Z,DcoFreqArray,LockStateValues,PhaseError = \
        runReceiver(T,SignalIn,32,FLLBW)
        PolarisFreq = DcoFreqArray[500:1000]
        plt.plot(PolarisFreq,'r')
        plt.ylim(-4,4)
        plt.ylabel('Doppler Shift (Hz)')
        plt.xlabel('Time (s)')
        plt.title('Bn=32Hz')
        
        plt.savefig(str(SVNum)+'32Polaris.eps',format='eps', dpi=1000)
        plt.close()
# Example #12  (scraped snippet separator; score: 0)
    def test_scenario_get_id(self):
        """get_id_str() zero-pads the id to four digits; wider ids are unpadded."""
        control_words = ControllWords.ControlWords(Output.Output())

        steps = [Step.Step(7, "abc\n\code: ghi", "def\n\code:jkl", control_words)]

        # Same eight cases as before, expressed as a loop: the expected
        # string is the id zero-padded to width 4.
        for scenario_id in (1, 9, 10, 99, 100, 999, 1000, 9999):
            scenario = Scenario.Scenario(scenario_id, "scn", steps, control_words)
            self.assertEqual(scenario.get_id_str(), "%04d" % scenario_id)
# Example #13  (scraped snippet separator; score: 0)
    def solve(self):
        """ Run storageVET

        Returns: the Results class

        """
        start_time = time.time()

        if Params.storagevet_requirement_check():
            for key, value in self.case_dict.items():
                # Build and optimize one scenario per case.
                run = Scenario.Scenario(value)
                run.add_technology()
                run.add_services()
                run.init_financials(value.Finance)
                run.add_control_constraints()
                run.optimize_problem_loop()

                # Cost benefit analysis is in the Result class.
                Result.add_instance(key, run)

            Result.sensitivity_summary()

        end_time = time.time()
        if self.verbose:
            print("Full runtime: " + str(end_time - start_time))
        return Result
# Example #14  (scraped snippet separator; score: 0)
            config = yaml.safe_load(f)

        root_path = os.path.dirname(filename.encode("utf-8"))
        (w, h) = (config.get("screen").get("size")[0],
                  config.get("screen").get("size")[1])
        (x, y) = (w / 2, h / 2)
        Media.Music.init()
        pygame.init()
        #    pygame.display.set_mode((w, h),pygame.FULLSCREEN,32)
        pygame.display.set_mode((w, h), pygame.DOUBLEBUF, 32)

        screen = pygame.display.get_surface()
        pygame.display.set_caption(
            os.path.basename(os.path.dirname(filename.encode("utf-8"))))
        pygame.display.update()

        image_cache = Media.ImageCache(screen)

        while True:
            scenario = Scenario.Scenario(root_path, config)
            self.title(scenario, screen, image_cache)
            self.execute_scenario(scenario, screen, image_cache)

if 0:
    # Dead branch — kept from an earlier headless/debug mode; never runs.
    scenario = Scenario.Scenario()
    for f in scenario.run():
        print(f)
else:
    # Normal entry point: launch the GUI.
    gui = GUI()
    gui.run()
        else:
            raise Exception, ("Don't know claim %s" % claim)

        prot += """
    end goal


    environment()
        """
        return (prot, "")


def registerall():
    """Register the eke HLPSL protocol variant for every supported backend."""
    # (backend, models) pairs mirror the four original explicit calls.
    for toolname, models in (("ofmc", ["Scen"]),
                             ("satmc", ["Scen"]),
                             ("cl-atse", ["Scen"]),
                             ("ta4sp", ["RepScen"])):
        Protocol.register(eke_hlpsl(toolname, models))


registerall()

if __name__ == "__main__":
    import Scenario

    # Smoke test: generate all scenarios for 2 roles / 2 runs and emit the
    # ofmc variant of the protocol for the second one.
    sl = Scenario.genall(2, 2, 1)
    x = eke_hlpsl("ofmc", ["Scen"])
    (prot, args) = x.generate("Scen", "sAk", sl[1])

    # NOTE(review): Python 2 print statements — this snippet predates Python 3.
    print prot
    print args
# Example #16  (scraped snippet separator; score: 0)
    def __init__(self, log_flag):
        """Set up logging, DB credentials, managers and the dispatch tables.

        log_flag: forwarded to Logger to control log verbosity.
        """
        self.logger = Logger(log_flag, LOG_FILE);
        self.logger.info('Started Domoleaf Master Daemon');
        self.d3config = {};
        self.aes_slave_keys = {};      # serial -> AES key, filled below
        self.aes_master_key = None     # this box's own key (see get_aes_slave_keys)
        self.connected_clients = {};
        self.sql = MasterSql();
        # DB credentials come from the master daemon config file.
        self._parser = DaemonConfigParser(MASTER_CONF_FILE);
        self.db_username = self._parser.getValueFromSection(MASTER_CONF_MYSQL_SECTION, MASTER_CONF_MYSQL_USER_ENTRY);
        self.db_passwd = self._parser.getValueFromSection(MASTER_CONF_MYSQL_SECTION, MASTER_CONF_MYSQL_PASSWORD_ENTRY);
        self.db_dbname = self._parser.getValueFromSection(MASTER_CONF_MYSQL_SECTION, MASTER_CONF_MYSQL_DB_NAME_ENTRY);
        self.get_aes_slave_keys(0);
        self.reload_camera(None, None, 0);
        self._scanner = Scanner();
        self.hostlist = [];
        self.hostlist.append(Host('', '127.0.0.1', socket.gethostname().upper()));
        # Hardware/feature managers and periodic helpers.
        self.knx_manager = KNXManager(self.aes_slave_keys);
        self.enocean_manager = EnOceanManager(self.aes_slave_keys);
        self.reload_d3config(None, None, 0);
        self.trigger = Trigger(self);
        self.scenario = Scenario(self);
        self.schedule = Schedule(self);
        self.calcLogs = CalcLogs(self);

        # Numeric action code -> KNX/IR/HTTP action callback.
        self.functions = {
              1 : self.knx_manager.send_knx_write_short_to_slave,
              2 : self.knx_manager.send_knx_write_long_to_slave,
              3 : self.knx_manager.send_knx_write_speed_fan,
              4 : self.knx_manager.send_knx_write_temp,
              5 : IP_IRManager().send_to_gc,
              6 : self.knx_manager.send_on,
              7 : self.knx_manager.send_to_thermostat,
              8 : self.knx_manager.send_clim_mode,
              9 : HttpReq().http_action,
             10 : self.upnp_audio,
             11 : self.knx_manager.send_knx_write_percent,
             12 : self.knx_manager.send_off,
             13 : self.knx_manager.send_knx_write_short_to_slave_r,
        };
        # Packet type -> handler method (dispatched from parse_data).
        self.data_function = {
            DATA_MONITOR_KNX                  : self.monitor_knx,
            DATA_MONITOR_IP                   : self.monitor_ip,
            DATA_MONITOR_ENOCEAN              : self.monitor_enocean,
            DATA_MONITOR_BLUETOOTH            : self.monitor_bluetooth,
            DATA_KNX_READ                     : self.knx_read,
            DATA_KNX_WRITE_S                  : self.knx_write_short,
            DATA_KNX_WRITE_L                  : self.knx_write_long,
            DATA_SEND_TO_DEVICE               : self.send_to_device,
            DATA_CRON_UPNP                    : self.cron_upnp,
            DATA_SEND_MAIL                    : self.send_mail,
            DATA_MODIF_DATETIME               : self.modif_datetime,
            DATA_CHECK_SLAVE                  : self.check_slave,
            DATA_RELOAD_CAMERA                : self.reload_camera,
            DATA_RELOAD_D3CONFIG              : self.reload_d3config,
            DATA_BACKUP_DB_CREATE_LOCAL       : self.backup_db_create_local,
            DATA_BACKUP_DB_REMOVE_LOCAL       : self.backup_db_remove_local,
            DATA_BACKUP_DB_LIST_LOCAL         : self.backup_db_list_local,
            DATA_BACKUP_DB_RESTORE_LOCAL      : self.backup_db_restore_local,
            DATA_CHECK_USB                    : self.check_usb,
            DATA_BACKUP_DB_CREATE_USB         : self.backup_db_create_usb,
            DATA_BACKUP_DB_REMOVE_USB         : self.backup_db_remove_usb,
            DATA_BACKUP_DB_LIST_USB           : self.backup_db_list_usb,
            DATA_BACKUP_DB_RESTORE_USB        : self.backup_db_restore_usb,
            DATA_SMARTCMD_LAUNCH              : self.smartcmd_launch,
            DATA_TRIGGERS_LIST_UPDATE         : self.triggers_list_update,
            DATA_SCHEDULES_LIST_UPDATE        : self.schedules_list_update,
            DATA_SCENARIOS_LIST_UPDATE        : self.scenarios_list_update,
            DATA_CHECK_ALL_SCHEDULES          : self.check_schedules,
            DATA_CALC_LOGS                    : self.launch_calc_logs,
            DATA_CHECK_UPDATES                : self.check_updates,
            DATA_UPDATE                       : self.update,
            DATA_SEND_ALIVE                   : self.send_request,
            DATA_SEND_TECH                    : self.send_tech,
            DATA_SEND_INTERFACES              : self.send_interfaces,
            DATA_SHUTDOWN_D3                  : self.shutdown_d3,
            DATA_REBOOT_D3                    : self.reboot_d3,
            DATA_WIFI_UPDATE                  : self.wifi_update,
            DATA_REMOTE_SQL                   : self.remote_sql
        };
# Example #17  (scraped snippet separator; score: 0)
#!/usr/bin/env python

import Scenario
import Perso
import utils as u

if __name__ == "__main__":
    u.clear()
    u.splash()
    c = input("o-]===> ")
    u.clear()
    if c == "1":
        print("Mode non disponible")
    elif c == "3":
        pass
    else:
        u.game_init()
        Scenario.Scenario(Perso.Perso(), 0)
#Equivalences
forall AA, BB : Agent . P(AA,BB) = P(BB,AA)

#System

%s
INITIATOR(Alice, K1, K2, Na)
RESPONDER(Alice, R, Nb)

#Intruder Information

Intruder = Mallory
IntruderKnowledge = {A, B, Mallory, Nm, Km1, Km2, Rm, P(Alice, Mallory), P(Bob,Mallory)}
    """ % (narrstr, pubkeydef, seckeydef, invkeydef, scenstr)

        return (prot, "")


Protocol.register(eke_spl("casperfdr", ["Scen"]))

if __name__ == "__main__":
    import Scenario

    # Smoke test: generate all scenarios for 2 roles / 5 runs and emit the
    # casperfdr variant of the protocol for the eighth one.
    sl = Scenario.genall(2, 5, 1)
    x = eke_spl("casperfdr", ["Scen"])
    (prot, args) = x.generate("Scen", "sBk", sl[7])

    # NOTE(review): Python 2 print statements — this snippet predates Python 3.
    print prot
    print args
# Example #19  (scraped snippet separator; score: 0)
# File: Ret.py  Project: netrc/retpy
class Portfolio:            # or should just be a global
    """Tracks family cash plus the Inc/Exp/Inv summary columns over time.

    Yearly results are written into the Scenario held in ``self.S``.
    """

    def __init__(self, f, cash=0):
        self.family = f     # family whose retirement is simulated
        self.cash = cash    # running cash balance
        self.ritems = []    # unused; see the ritemsAppend TODO below
        self.S = Scenario(f)
        self.inflation = 1.03  # assumed annual inflation factor

        # Make our Summary 'columns'
        RSummary("Inc")
        RSummary("Exp")
        RSummary("Inv")

    # TODO: Take out Portfolio ritems ; now all ritems are stored only in Summary columns
    def ritemsAppend(self, r):
        """No-op kept for API compatibility; ritems live in the RSummary columns."""
        #self.ritems.append(r)
        pass

    # Fixed: the original defined a method ``cash(self)`` here, but
    # ``__init__`` stores the numeric balance as the instance attribute
    # ``self.cash``, which shadows the method on every instance — the method
    # could never be called successfully, so the dead definition was removed.

    def netWorth(self):
        """Cash plus income and investments, minus expenses."""
        #print("cash:${}  Inc:${}  Inv:${}  Exp:${}".format( self.cash, RSummary.value("Inc") , RSummary.value("Inv") , RSummary.value("Exp") ))
        return  self.cash + RSummary.value("Inc") + RSummary.value("Inv") - RSummary.value("Exp")

    def summaryString(self):
        """One-line snapshot of cash, the three columns and net worth."""
        return "c= ${} ${} e= ${} i=${:.0f} ==> ${:.0f}".format(self.cash, RSummary.value("Inc"), RSummary.value("Exp"), RSummary.value("Inv"), self.netWorth())

    def run(self, startYear, endYear):
        """Simulate every year from startYear through endYear inclusive."""
        logging.debug("INIT: {}".format(self.summaryString()))
        for y in range(startYear, endYear + 1):
            self.S.addYear(y)
            logging.debug("Starting: {}".format(y))
            for sname in RSummary._sumItems:
                for r in RSummary._sumItems[sname].ritems:
                    # each "event" is a value ?? or a transfer and a value??
                    r.reset()
                    r.currentYear = y   # used for various Ritems, e.g. expenseInflation
                    for e in r.events:
                        if e.year == y:
                            e.func(r)
                    # after all the events are done
                    self.S.addColVal(r.name, y, r.value)
                self.S.addColVal(sname, y, RSummary.value(sname))

            logging.debug("{}: {}".format(y, self.summaryString()))
            # TODO: somehow make NetW and Cash columns like RSummary, so no special adding here
            self.S.addColVal('NetW', y, self.netWorth())
            self.cash += RSummary.value("Inc") - RSummary.value("Exp")
            self.S.addColVal('Cash', y, self.cash)
            # hack
            self.S.ritemLists(RSummary._sumItems["Inc"].ritems, RSummary._sumItems["Exp"].ritems, RSummary._sumItems["Inv"].ritems)

    def runTilEnd(self, startYear):
        """Run from startYear through the family's final year."""
        self.run(startYear, self.family.lastYear())
# Example #20  (scraped snippet separator; score: 0)
from Tools import *
from Scenario import *
from NSGA import *
from Evolution import *
import matplotlib.pyplot as plt

# Build the scenario from the three input files, evolve a population
# (50 individuals, 100 generations) with NSGA, dump the selected genes to
# gene.txt and plot the two objective values against each other.
# NOTE(review): Python 2 print statements below — py2 script.
tool_box = Tools()
flow_data = tool_box.read_flow_data_from_txt()
loc_data = tool_box.read_loc_data_from_txt()
time_data = tool_box.read_slot_data_from_txt()
scenario = Scenario(flow_data, loc_data, time_data)
# scenario.generate_initial_solution()
print 'scenario generated'
problem = NSGA_problem(scenario)
evolve = Evolution(problem, 50, 100)
selected_individuals = evolve.evolve()
# One comma-prefixed line of gene values per selected individual.
f = open('gene.txt', 'w')
for i in selected_individuals:
    s = ''
    for g in i.features:
        s = s + ',' + str(g)
    f.write(s + '\n')
f.close()
x = [problem.f1(i) for i in selected_individuals]
y = [problem.f2(i) for i in selected_individuals]
print x, y
plt.plot(x, y, 'ro')
plt.show()
# Example #21  (scraped snippet separator; score: 0)
class MasterDaemon:
    """
    Main class of the master daemon
    It provides communication between master and slave boxes and a part of the database management
    """
    def __init__(self, log_flag):
        """Set up logging, DB credentials, managers and the dispatch tables.

        log_flag: forwarded to Logger to control log verbosity.
        """
        self.logger = Logger(log_flag, LOG_FILE);
        self.logger.info('Started Domoleaf Master Daemon');
        self.d3config = {};
        self.aes_slave_keys = {};      # serial -> AES key, filled below
        self.aes_master_key = None     # this box's own key (see get_aes_slave_keys)
        self.connected_clients = {};
        self.sql = MasterSql();
        # DB credentials come from the master daemon config file.
        self._parser = DaemonConfigParser(MASTER_CONF_FILE);
        self.db_username = self._parser.getValueFromSection(MASTER_CONF_MYSQL_SECTION, MASTER_CONF_MYSQL_USER_ENTRY);
        self.db_passwd = self._parser.getValueFromSection(MASTER_CONF_MYSQL_SECTION, MASTER_CONF_MYSQL_PASSWORD_ENTRY);
        self.db_dbname = self._parser.getValueFromSection(MASTER_CONF_MYSQL_SECTION, MASTER_CONF_MYSQL_DB_NAME_ENTRY);
        self.get_aes_slave_keys(0);
        self.reload_camera(None, None, 0);
        self._scanner = Scanner();
        self.hostlist = [];
        self.hostlist.append(Host('', '127.0.0.1', socket.gethostname().upper()));
        # Hardware/feature managers and periodic helpers.
        self.knx_manager = KNXManager(self.aes_slave_keys);
        self.enocean_manager = EnOceanManager(self.aes_slave_keys);
        self.reload_d3config(None, None, 0);
        self.trigger = Trigger(self);
        self.scenario = Scenario(self);
        self.schedule = Schedule(self);
        self.calcLogs = CalcLogs(self);

        # Numeric action code -> KNX/IR/HTTP action callback.
        self.functions = {
              1 : self.knx_manager.send_knx_write_short_to_slave,
              2 : self.knx_manager.send_knx_write_long_to_slave,
              3 : self.knx_manager.send_knx_write_speed_fan,
              4 : self.knx_manager.send_knx_write_temp,
              5 : IP_IRManager().send_to_gc,
              6 : self.knx_manager.send_on,
              7 : self.knx_manager.send_to_thermostat,
              8 : self.knx_manager.send_clim_mode,
              9 : HttpReq().http_action,
             10 : self.upnp_audio,
             11 : self.knx_manager.send_knx_write_percent,
             12 : self.knx_manager.send_off,
             13 : self.knx_manager.send_knx_write_short_to_slave_r,
        };
        # Packet type -> handler method (dispatched from parse_data).
        self.data_function = {
            DATA_MONITOR_KNX                  : self.monitor_knx,
            DATA_MONITOR_IP                   : self.monitor_ip,
            DATA_MONITOR_ENOCEAN              : self.monitor_enocean,
            DATA_MONITOR_BLUETOOTH            : self.monitor_bluetooth,
            DATA_KNX_READ                     : self.knx_read,
            DATA_KNX_WRITE_S                  : self.knx_write_short,
            DATA_KNX_WRITE_L                  : self.knx_write_long,
            DATA_SEND_TO_DEVICE               : self.send_to_device,
            DATA_CRON_UPNP                    : self.cron_upnp,
            DATA_SEND_MAIL                    : self.send_mail,
            DATA_MODIF_DATETIME               : self.modif_datetime,
            DATA_CHECK_SLAVE                  : self.check_slave,
            DATA_RELOAD_CAMERA                : self.reload_camera,
            DATA_RELOAD_D3CONFIG              : self.reload_d3config,
            DATA_BACKUP_DB_CREATE_LOCAL       : self.backup_db_create_local,
            DATA_BACKUP_DB_REMOVE_LOCAL       : self.backup_db_remove_local,
            DATA_BACKUP_DB_LIST_LOCAL         : self.backup_db_list_local,
            DATA_BACKUP_DB_RESTORE_LOCAL      : self.backup_db_restore_local,
            DATA_CHECK_USB                    : self.check_usb,
            DATA_BACKUP_DB_CREATE_USB         : self.backup_db_create_usb,
            DATA_BACKUP_DB_REMOVE_USB         : self.backup_db_remove_usb,
            DATA_BACKUP_DB_LIST_USB           : self.backup_db_list_usb,
            DATA_BACKUP_DB_RESTORE_USB        : self.backup_db_restore_usb,
            DATA_SMARTCMD_LAUNCH              : self.smartcmd_launch,
            DATA_TRIGGERS_LIST_UPDATE         : self.triggers_list_update,
            DATA_SCHEDULES_LIST_UPDATE        : self.schedules_list_update,
            DATA_SCENARIOS_LIST_UPDATE        : self.scenarios_list_update,
            DATA_CHECK_ALL_SCHEDULES          : self.check_schedules,
            DATA_CALC_LOGS                    : self.launch_calc_logs,
            DATA_CHECK_UPDATES                : self.check_updates,
            DATA_UPDATE                       : self.update,
            DATA_SEND_ALIVE                   : self.send_request,
            DATA_SEND_TECH                    : self.send_tech,
            DATA_SEND_INTERFACES              : self.send_interfaces,
            DATA_SHUTDOWN_D3                  : self.shutdown_d3,
            DATA_REBOOT_D3                    : self.reboot_d3,
            DATA_WIFI_UPDATE                  : self.wifi_update,
            DATA_REMOTE_SQL                   : self.remote_sql
        };

    def get_aes_slave_keys(self, db):
        """
        Get the secretkeys of each slave daemon stored in database
        """
        rows = self.sql.mysql_handler_personnal_query("SELECT serial, secretkey FROM daemon", db);
        self_hostname = socket.gethostname();
        for serial, secretkey in rows:
            if SLAVE_NAME_PREFIX in serial or 'MD3' in serial:
                self.aes_slave_keys[serial] = secretkey;
            elif self_hostname == serial:
                # This box's own entry doubles as the master key.
                self.aes_slave_keys[serial] = secretkey;
                self.aes_master_key = secretkey;

    def stop(self):
        """
        Stops the daemon and closes sockets
        """
        flag = False;
        while not flag:
            flag = True;
            for client in self.connected_clients.values():
                flag = False;
                client.close();
                break;
        self.slave_connection.close();
        sys.exit(0);

    def run(self):
        """
        Initialization of the connections and accepting incomming communications
        """
        # Two listening sockets: one for slave daemons, one for commands.
        self.slave_connection = socket.socket(socket.AF_INET, socket.SOCK_STREAM);
        self.cmd_connection = socket.socket(socket.AF_INET, socket.SOCK_STREAM);
        # Disable Nagle and allow fast rebinds after a restart.
        self.slave_connection.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1);
        self.cmd_connection.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1);
        self.slave_connection.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1);
        self.cmd_connection.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1);
        # Both ports come from the config file and are mandatory.
        s_port = self._parser.getValueFromSection(MASTER_CONF_LISTEN_SECTION, MASTER_CONF_LISTEN_PORT_SLAVE_ENTRY);
        c_port = self._parser.getValueFromSection(MASTER_CONF_LISTEN_SECTION, MASTER_CONF_LISTEN_PORT_CMD_ENTRY);
        if not s_port:
            frameinfo = getframeinfo(currentframe());
            self.logger.error('in run: No slave listening port defined in '+MASTER_CONF_FILE);
            sys.exit(1);
        if not c_port:
            frameinfo = getframeinfo(currentframe());
            self.logger.error('in run: No command listening port defined in '+MASTER_CONF_FILE);
            sys.exit(1);
        self.slave_connection.bind(('', int(s_port)));
        self.slave_connection.listen(MAX_SLAVES);
        self.cmd_connection.bind(('', int(c_port)));
        self.cmd_connection.listen(MAX_CMDS);
        # Hand over to the select() loop; this call does not return.
        self.loop();

    def loop(self):
        """
        Main loop. Waits for new connections.
        """
        # NOTE(review): this assignment shadows the run() method on the
        # instance; external code may rely on setting `daemon.run = False`
        # to stop the loop — confirm before renaming.
        self.run = True;
        while self.run:
            try:
                # Poll each listening socket with a timeout so the loop can
                # observe self.run changes and keyboard interrupts.
                rlist, wlist, elist = select.select([self.slave_connection], [], [], SELECT_TIMEOUT);
                for connection in rlist:
                    self.accept_new_slave_connection(connection);
                rlist, wlist, elist = select.select([self.cmd_connection], [], [], SELECT_TIMEOUT);
                for connection in rlist:
                    self.accept_new_cmd_connection(connection);
            except KeyboardInterrupt as e:
                frameinfo = getframeinfo(currentframe());
                self.logger.info('in loop: Keyboard interrupt: leaving program');
                print("[ MASTER DAEMON ",frameinfo.filename,":",str(frameinfo.lineno)," ]: Keyboard Interrupt");
                self.stop();
                sys.exit(0);
            except ValueError as e:
                # Log and keep serving; a bad select argument is not fatal.
                frameinfo = getframeinfo(currentframe());
                self.logger.error('in loop: Value error: '+str(e));
                print("[ MASTER DAEMON ",frameinfo.filename,":",str(frameinfo.lineno),"]: Value Error");
                print(e);
                pass;

    def accept_new_cmd_connection(self, connection):
        """
        Gets new domoleaf connections and threads the treatment.
        """
        new_connection, _addr = connection.accept();
        # Each command connection is served by its own receiver thread.
        CommandReceiver(new_connection, self).start();

    def accept_new_slave_connection(self, connection):
        """
        Gets new slave connections and threads the treatment.
        """
        new_connection, addr = connection.accept();
        local_name = socket.gethostname();
        # Resolve the peer's hostname; fall back to our own name when the
        # reverse lookup fails or resolves to localhost.
        try:
            peer_name = socket.gethostbyaddr(addr[0])[0]
        except socket.error as serr:
            peer_name = 'localhost'
        if peer_name == 'localhost':
            peer_name = local_name
        # Keep only the short host name (strip any domain suffix).
        receiver = SlaveReceiver(new_connection, peer_name.split('.')[0], self);
        receiver.start();

    def parse_data(self, data, connection, daemon_id, db):
        """
        Once data are received whether from domoleaf or slave, the function of the packet_type in data is called.
        """
        json_obj = json.JSONDecoder().decode(data);
        json_obj['daemon_id'] = daemon_id;
        if json_obj['packet_type'] in self.data_function.keys():
            self.data_function[json_obj['packet_type']](json_obj, connection, db);
        else:
            frameinfo = getframeinfo(currentframe());

    def check_updates(self, json_obj, connection, db):
        """Probe apt for a newer 'domomaster' package and record it in the DB.

        The available version (or the current one when up to date) is stored
        in configuration_id=13; configuration_id=4 holds the installed one.
        """
        query = 'SELECT configuration_value FROM configuration WHERE configuration_id=4';
        actual_version = self.sql.mysql_handler_personnal_query(query, db);
        if not actual_version:
            self.logger.error("CHECK_UPDATE : No Master Version");
            return;
        # Clear the 'available version' slot before probing.
        query = 'UPDATE configuration SET configuration_value="" WHERE configuration_id=13';
        self.sql.mysql_handler_personnal_query(query, db);
        # Repair any interrupted dpkg state, then refresh the package index.
        p = call(['dpkg', '--configure', '-a'])
        p = Popen(['apt-get', 'update'], stdin=PIPE, stdout=PIPE, stderr=PIPE, bufsize=-1);
        output, error = p.communicate();
        p = Popen(['apt-show-versions',  '-u', 'domomaster'], stdin=PIPE, stdout=PIPE, stderr=PIPE, bufsize=-1);
        output, error = p.communicate();
        if not p.returncode:
            # An upgrade line was printed; its last token is the new version.
            tab = output.decode("utf-8").split(" ");
            version = tab[-1].rsplit("\n")[0];
        else:
            # No upgrade available: keep the currently stored version.
            version = actual_version[0][0];
        query = ''.join(['UPDATE configuration SET configuration_value="', version, '" WHERE configuration_id=13']);
        self.sql.mysql_handler_personnal_query(query, db);

    def update(self, json_obj, connection, db):
        """Upgrade domomaster/domoslave via apt, record the new version, and
        propagate the update to every MD3/SD3 host not yet updated.

        json_obj['data'] accumulates the hostnames already processed.
        """
        call(['apt-get', 'update']);
        p = Popen("DEBIAN_FRONTEND=noninteractive apt-get install domomaster domoslave -y ",
              shell=True, stdin=None, stdout=False, stderr=False,executable="/bin/bash");
        output, error = p.communicate();
        hostname = socket.gethostname();
        if '.' in hostname:
            hostname = hostname.split('.')[0];
        version = os.popen("dpkg-query -W -f='${Version}\n' domomaster").read().split('\n')[0];
        # Fixed: the original query began with the garbled 'UPDATmon SET';
        # it must target the daemon table like the per-host update below.
        query = ''.join(['UPDATE daemon SET version="', version, '" WHERE name="', hostname, '"' ]);
        self.sql.mysql_handler_personnal_query(query, db);
        query = ''.join(['UPDATE configuration SET configuration_value="', version, '" WHERE configuration_id=4']);
        self.sql.mysql_handler_personnal_query(query, db);
        json_obj['data'].append(hostname);
        port = self._parser.getValueFromSection('connect', 'port');
        for host in self.hostlist:
            if (host._Hostname.startswith('MD3') or host._Hostname.startswith('SD3')) and host._Hostname not in json_obj['data']:
                sock = socket.create_connection((host._IpAddr, port));
                json_str = json.JSONEncoder().encode(json_obj);
                sock.send(bytes(json_str, 'utf-8'));
                data = sock.recv(4096);
                # First 16 bytes carry the AES IV; the rest is the payload.
                decrypt_IV = data[:16].decode();
                decode_obj = AES.new(self.aes_master_key, AES.MODE_CBC, decrypt_IV);
                data2 = decode_obj.decrypt(data[16:]).decode();
                # Fixed: data2 is a decrypted JSON *string*; indexing it with
                # 'new_version' raised TypeError.  Parse it first.
                # NOTE(review): assumes slaves reply with a JSON object
                # containing 'new_version' — confirm against the slave code.
                version = json.loads(data2)['new_version'];
                query = ''.join(['UPDATE daemon SET version="', version, '" WHERE name="', host._Hostname, '"']);
                self.sql.mysql_handler_personnal_query(query, db);
                sock.close();

    def backup_db_create_local(self, json_obj, connection, db):
        """Dump the domoleaf database to a timestamped .sql.tar.gz archive under /etc/domoleaf/sql/backup/."""
        backup_dir = '/etc/domoleaf/sql/backup/';
        stamp = str(time.time()).split('.')[0];
        dump_name = 'domoleaf_backup_' + stamp + '.sql';
        # Dump, compress, then drop the uncompressed dump.
        os.system("mysqldump --defaults-file=/etc/mysql/debian.cnf domoleaf > " + backup_dir + dump_name);
        os.system('cd ' + backup_dir + ' && tar -czf ' + dump_name + '.tar.gz ' + dump_name);
        os.system('rm ' + backup_dir + dump_name);

    def backup_db_remove_local(self, json_obj, connection, db):
        """
        Removes a local database backup named by json_obj['data'] (a timestamp).
        Tries the compressed archive first, then falls back to the raw .sql dump.
        """
        name = str(json_obj['data']);
        # Security: the name comes from the network and is embedded in a
        # filesystem path; reject any separator or parent-directory component,
        # not just a leading '.' or '/' as before.
        if not name or name[0] == '.' or '/' in name or '..' in name:
            self.logger.error('The filename is corrupted. Aborting database file removing.')
            return;
        filename = ''.join(['/etc/domoleaf/sql/backup/domoleaf_backup_', name, '.sql.tar.gz']);
        try:
            os.stat(filename);
        except OSError:
            # No archive: fall back to the uncompressed dump.
            try:
                filename = filename.split('.tar.gz')[0];
                os.stat(filename);
            except OSError as e:
                self.logger.error("The database file to remove does not exists.")
                self.logger.error(e)
                return;
        os.remove(filename);

    def backup_db_list_local(self, json_obj, connection, db):
        """Sends the client the JSON list of local backups (name + size), sorted by name descending."""
        backup_dir = '/etc/domoleaf/sql/backup/';
        entries = [];
        for name in os.listdir(backup_dir):
            info = os.stat(backup_dir + name);
            if '.sql' in name:
                # Strip everything from '.sql' on to get the display name.
                entries.append({"name": name.split('.sql')[0], "size": info.st_size});
        entries.sort(key=lambda item: item['name'], reverse=True);
        connection.send(bytes(json.JSONEncoder().encode(entries), 'utf-8'));

    def backup_db_restore_local(self, json_obj, connection, db):
        """
        Restores the local backup named by json_obj['data'] (a timestamp) into
        the domoleaf database. Prefers the .sql.tar.gz archive (extracted then
        removed); falls back to a bare .sql dump if no archive exists.
        """
        path = '/etc/domoleaf/sql/backup/';
        filename = ''.join(['domoleaf_backup_', str(json_obj['data']), '.sql.tar.gz']);
        # Basic sanity check on the network-supplied name (first char only).
        if json_obj['data'][0] == '.' or json_obj['data'][0] == '/':
            self.logger.error('The filename is corrupted. Aborting database restoring.')
            return;
        try:
            # Archive path: extract, load the .sql into MySQL, drop the .sql.
            os.stat(path+filename);
            os.system('cd '+path+' && tar -xzf '+filename);
            os.system('mysql --defaults-file=/etc/mysql/debian.cnf domoleaf < '+path+filename.split('.tar.gz')[0]);
            os.system('rm '+path+filename.split('.tar.gz')[0]);
            return;
        except Exception as e:
            # No archive: look for the uncompressed dump instead.
            try:
                filename = filename.split('.tar.gz')[0];
                os.stat(path+filename);
            except Exception as e:
                self.logger.error("The database file to restore does not exists.");
                self.logger.error(e);
                return;
        # Reached only via the fallback: load the bare .sql dump.
        os.system('mysql --defaults-file=/etc/mysql/debian.cnf domoleaf < '+path+filename);

    def check_usb(self, json_obj, connection, db):
        """Reports to the client whether a USB partition (/dev/sd?1) is present, as JSON-encoded 1 or 0."""
        try:
            device = glob.glob('/dev/sd?1')[0];
        except Exception as e:
            # No matching device node at all: reply nothing.
            return;
        present = 1 if os.path.exists(device) else 0;
        connection.send(bytes(json.JSONEncoder().encode(present), 'utf-8'));

    def backup_db_list_usb(self, json_obj, connection, db):
        """
        Sends the client the JSON list of backups stored on the USB key
        (name + size), sorted by name descending. Mounts the first /dev/sd?1
        partition on /etc/domoleaf/mnt for the duration of the listing.
        """
        json_obj = [];
        append = json_obj.append
        # NOTE(review): unlike check_usb(), this glob lookup is unguarded;
        # with no /dev/sd?1 device present it raises IndexError.
        sdx1 = glob.glob('/dev/sd?1')[0];
        if not (os.path.exists(sdx1)):
            return;
        os.system('mount '+sdx1+' /etc/domoleaf/mnt');
        os.system('mkdir -p /etc/domoleaf/mnt/backup');
        backup_list = os.listdir('/etc/domoleaf/mnt/backup/')
        for f in backup_list:
            s = os.stat('/etc/domoleaf/mnt/backup/'+f);
            if '.sql' in f:
                g = f.split('.sql')[0];
                append({"name": g, "size": s.st_size});
        # Unmount before replying; the stat results are already collected.
        os.system('umount /etc/domoleaf/mnt');
        json_sorted = sorted(json_obj, key=lambda json_obj: json_obj['name'], reverse=True);
        json_str = json.JSONEncoder().encode(json_sorted);
        connection.send(bytes(json_str, 'utf-8'));

    def backup_db_remove_usb(self, json_obj, connection, db):
        """
        Removes the backup named by json_obj['data'] (a timestamp) from the
        USB key. Mounts /dev/sd?1 on /etc/domoleaf/mnt, tries the .sql.tar.gz
        archive first, then the bare .sql dump, and unmounts when done.
        """
        filename = ''.join(['/etc/domoleaf/mnt/backup/domoleaf_backup_', str(json_obj['data']), '.sql.tar.gz']);
        # Basic sanity check on the network-supplied name (first char only).
        if str(json_obj['data'][0]) == '.' or str(json_obj['data'][0]) == '/':
            self.logger.error('The filename is corrupted. Aborting database file removing.')
            return;
        # NOTE(review): unguarded glob — raises IndexError when no USB device.
        sdx1 = glob.glob('/dev/sd?1')[0];
        if not (os.path.exists(sdx1)):
            return;
        os.system('mount '+sdx1+' /etc/domoleaf/mnt');
        path = '/etc/domoleaf/mnt/backup/';
        try:
            # stat() only works once the key is mounted (line above).
            os.stat(filename);
        except Exception as e:
            # No archive: fall back to the uncompressed dump.
            try:
                filename = filename.split('.tar.gz')[0];
                os.stat(filename);
            except Exception as e:
                self.logger.error("The database file to remove does not exists.")
                self.logger.error(e)
                os.system('umount /etc/domoleaf/mnt');
                return;
        os.remove(filename);
        os.system('umount /etc/domoleaf/mnt');

    def backup_db_restore_usb(self, json_obj, connection, db):
        """
        Restores the backup named by json_obj['data'] (a timestamp) from the
        USB key into the domoleaf database. Mounts /dev/sd?1, copies the dump
        to /tmp (extracting it when only the .tar.gz exists), loads it into
        MySQL, and cleans up the temporary files.
        """
        path = '/etc/domoleaf/mnt/backup/';
        filename = ''.join(['domoleaf_backup_', str(json_obj['data']), '.sql']);
        # Basic sanity check on the network-supplied name (first char only).
        if json_obj['data'][0] == '.' or json_obj['data'][0] == '/':
            self.logger.error('The filename is corrupted. Aborting database restoring.')
            return;
        # NOTE(review): unguarded glob — raises IndexError when no USB device.
        sdx1 = glob.glob('/dev/sd?1')[0];
        if not (os.path.exists(sdx1)):
            return;
        os.system('mount '+sdx1+' /etc/domoleaf/mnt');
        try:
            # Direct path: a bare .sql dump exists on the key.
            os.stat(path+filename);
            os.system('cp '+path+filename+' /tmp/ && umount /etc/domoleaf/mnt && cd /tmp/');
            os.system('mysql --defaults-file=/etc/mysql/debian.cnf domoleaf < /tmp/'+filename);
            os.remove('/tmp/'+filename);
            return;
        except Exception as e:
            # Fallback: only the compressed archive exists; copy and extract it.
            try:
                filename += '.tar.gz';
                os.stat(path+filename);
                os.system('cp '+path+filename+' /tmp/ && umount /etc/domoleaf/mnt && cd /tmp/ && tar -xzf '+filename);
            except Exception as e:
                self.logger.error("The database file to restore does not exists.");
                self.logger.error(e);
                os.system('umount /etc/domoleaf/mnt');
                return;
        # Reached only via the archive fallback: load the extracted dump.
        os.system('umount /etc/domoleaf/mnt');
        os.system('mysql --defaults-file=/etc/mysql/debian.cnf domoleaf < /tmp/'+filename.split('.tar.gz')[0]);
        os.remove('/tmp/'+filename);
        os.remove('/tmp/'+filename.split('.tar.gz')[0]);

    def backup_db_create_usb(self, json_obj, connection, db):
        """Dump the domoleaf database to a timestamped .sql.tar.gz archive on the USB key (/dev/sd?1)."""
        device = glob.glob('/dev/sd?1')[0];
        if not os.path.exists(device):
            return;
        os.system('mount ' + device + ' /etc/domoleaf/mnt');
        backup_dir = '/etc/domoleaf/mnt/backup/';
        os.system('mkdir -p ' + backup_dir);
        stamp = str(time.time()).split('.')[0];
        dump_name = 'domoleaf_backup_' + stamp + '.sql';
        # Dump, compress, drop the uncompressed dump, then release the key.
        os.system("mysqldump --defaults-file=/etc/mysql/debian.cnf domoleaf > " + backup_dir + dump_name);
        os.system('cd ' + backup_dir + ' && tar -czf ' + dump_name + '.tar.gz ' + dump_name);
        os.system('rm ' + backup_dir + dump_name);
        os.system('umount /etc/domoleaf/mnt');

    def monitor_knx(self, json_obj, connection, db):
        """
        Callback called each time a monitor_knx packet is received.
        Logs the bus event, refreshes the matching room_device_option rows,
        and evaluates the scenarios when anything changed.
        """
        daemon_id = self.sql.update_knx_log(json_obj, db);
        changed = self.knx_manager.update_room_device_option(daemon_id, json_obj, db);
        if changed:
            self.scenario.setValues(self.get_global_state(db), self.trigger, self.schedule, connection, changed);
            self.scenario.start();
        connection.close();

    def knx_write_short(self, json_obj, connection, db):
        """
        Callback called each time a knx_write_short packet is received.
        Resolves the slave daemon owning the target device and forwards the
        short write to it.
        """
        slave_name = self.get_slave_name(json_obj, self.sql.get_daemons(db));
        if slave_name is None:
            connection.close();
            return None;
        target = slave_name.split('.')[0];
        dev = {"addr_dst": json_obj['data']['addr']};
        self.knx_manager.send_knx_write_short_to_slave(json_obj, dev, target);
        connection.close();
        return None;

    def knx_write_long(self, json_obj, connection, db):
        """
        Callback called each time a knx_write_long packet is received.
        Resolves the slave daemon owning the target device and forwards the
        long write to it.
        """
        slave_name = self.get_slave_name(json_obj, self.sql.get_daemons(db));
        if slave_name is None:
            connection.close();
            return None;
        target = slave_name.split('.')[0];
        dev = {"addr_dst": json_obj['data']['addr']};
        self.knx_manager.send_knx_write_long_to_slave(json_obj, dev, target);
        connection.close();
        return None;

    def knx_read(self, json_obj, connection, db):
        """
        Callback called each time a knx_read packet is received.
        Resolves the slave daemon owning the target device and forwards the
        read request to it.
        """
        daemons = self.sql.get_daemons(db);
        slave_name = self.get_slave_name(json_obj, daemons);
        if slave_name is None:
            # Fixed: close the client connection on this path too, as
            # knx_write_short/knx_write_long do; previously the socket leaked.
            connection.close();
            return None;
        slave_name = slave_name.split('.')[0];
        self.knx_manager.send_knx_read_request_to_slave(slave_name, json_obj);
        connection.close();

    def monitor_ip(self, json_obj, connection, db):
        """
        Callback called each time a monitor_ip packet is received.
        Performs a fresh local network scan and stores the resulting host list
        both in the database and on this instance.
        """
        self.scanner.scan();
        hosts = self.scanner._HostList;
        self.sql.insert_hostlist_in_db(hosts, db);
        self.hostlist = hosts;
        connection.close();

    def monitor_bluetooth(self, json_obj, connection, db):
        """
        TODO: bluetooth monitoring is not implemented yet; this handler only
        closes the client connection.
        """
        connection.close();
        return None;

    def monitor_enocean(self, json_obj, connection, db):
        """
        Callback called each time a monitor_enocean packet is received.
        Stores the data in the enocean_log table, refreshes the matching
        room_device_option rows, and evaluates the scenarios when anything
        changed.
        """
        daemon_id = self.sql.update_enocean_log(json_obj, db);
        doList = self.enocean_manager.update_room_device_option(daemon_id, json_obj, db);
        if doList:
            # Fixed: hand the connection to the scenario while it is still
            # open; it was previously closed before setValues(), unlike the
            # equivalent monitor_knx() handler.
            self.scenario.setValues(self.get_global_state(db), self.trigger, self.schedule, connection, doList);
            self.scenario.start();
        connection.close();
        return None;

    def send_to_device(self, json_obj, connection, db):
        """
        Retrieves the good device in the database and builds the request to send.

        Loads the device for (room_device_id, option_id), resolves the slave
        hostname owning it, then dispatches to the protocol-specific writing
        callback registered in self.functions under 'function_writing'.
        """
        hostname = '';
        dm = DeviceManager(int(json_obj['data']['room_device_id']), int(json_obj['data']['option_id']), DEBUG_MODE);
        dev = dm.load_from_db(db);
        if dev is None:
            connection.close();
            return ;
        if 'daemon_name' in dev:
            for host in self.hostlist:
                if dev['daemon_name'] == host._Hostname:
                    hostname = host._Hostname;
                    break;
        # function_writing selects the send callback; <= 0 means not writable.
        function_writing = int(dev['function_writing']);
        if (function_writing > 0):
            try:
                self.functions[function_writing](json_obj, dev, hostname);
            except Exception as e:
                self.logger.error(e);
        connection.close();

    def upnp_audio(self, json_obj, dev, hostname):
        """Drives a UPnP audio device: builds a controller for the device's address/port and runs the requested action."""
        controller = UpnpAudio(dev['addr'], int(dev['plus1']));
        controller.action(json_obj);

    def get_ip_ifname(self, ifname):
        """
        Returns the IPv4 address bound to the network interface 'ifname'
        (e.g. "eth0"), or None on failure.
        """
        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM);
        try:
            # 0x8915 is the SIOCGIFADDR ioctl (see netdevice(7)); bytes 20..24
            # of the returned ifreq structure hold the IPv4 address.
            res = socket.inet_ntoa(fcntl.ioctl(s.fileno(),
                                               0x8915,
                                               struct.pack('256s', bytes(ifname, 'utf-8')))[20:24]);
            return res;
        except Exception as e:
            frameinfo = getframeinfo(currentframe());
            self.logger.error('in get_ip_ifname: '+str(e));
            return None;

    def cron_upnp(self, json_obj, connection, db):
        """
        Callback called each time a cron_upnp packet is received.

        For each action in json_obj['data'], opens or closes a UPnP port
        redirection (via upnpc) toward the configured external port stored in
        the configuration table (id 1 -> local port 80, id 2 -> local 443).
        """
        local_ip = self.get_ip_ifname("eth0");
        if local_ip is None:
            connection.close();
            return None;
        query = "SELECT configuration_id, configuration_value FROM configuration";
        # NOTE(review): other call sites pass 'db' as a second argument here;
        # confirm mysql_handler_personnal_query has a usable default.
        res = self.sql.mysql_handler_personnal_query(query);
        actions = json_obj['data'];
        for act in actions:
            if act['action'] == 'open':
                for r in res:
                    if int(r[0]) == int(act['configuration_id']):
                        if int(r[0]) == 1:
                            # id 1: redirect external port r[1] to local HTTP (80)
                            call(["upnpc", "-a", local_ip, str(r[1]), "80", act['protocol']]);
                        elif int(r[0]) == 2:
                            # id 2: redirect external port r[1] to local HTTPS (443)
                            call(["upnpc", "-a", local_ip, str(r[1]), "443", act['protocol']]);
            elif act['action'] == 'close':
                for r in res:
                    if int(r[0]) == int(act['configuration_id']):
                        call(["upnpc", "-d", str(r[1]), act['protocol']]);
        # NOTE(review): the connection is left open on the success path,
        # unlike most handlers — confirm this is intended.

    def reload_camera(self, json_obj, connection, db):
        """
        Generation of the file devices.conf located in /etc/domoleaf by default.
        Writes one nginx "location" proxy block per camera device
        (protocol_id = 6) that has a valid IP, then restarts nginx.
        """
        query = "SELECT room_device_id, addr, plus1 FROM room_device WHERE protocol_id = 6";
        res = self.sql.mysql_handler_personnal_query(query, db);
        # 'with' guarantees the config file is closed (and flushed) even if a
        # row is malformed; previously an exception leaked the handle.
        with open(CAMERA_CONF_FILE, 'w') as camera_file:
            for r in res:
                ip = str(r[1]);
                if r[1] and utils.is_valid_ip(ip):
                    camera_file.write("location /device/"+str(r[0]));
                    camera_file.write("/ {\n")
                    camera_file.write("\tproxy_buffering off;\n")
                    camera_file.write("\tproxy_pass http://"+ip);
                    # plus1 optionally carries the camera's HTTP port.
                    if str(r[2]).isdigit():
                        camera_file.write(":"+str(r[2])+"/;\n}\n\n");
                    else:
                        camera_file.write(":/;\n}\n\n");
        call(["service", "nginx", "restart"]);

    def reload_d3config(self, json_obj, connection, db):
        """
        Loads port config. Reads the configuration table and caches each
        value in self.d3config keyed by its stringified configuration_id.
        """
        query = "SELECT configuration_id, configuration_value FROM configuration";
        for conf_id, conf_value in self.sql.mysql_handler_personnal_query(query, db):
            self.d3config[str(conf_id)] = conf_value;

    def check_slave(self, json_obj, connection, db):
        """
        Asks "check_slave" to the slave described in json_obj and waits for answer.

        Sends an AES-encrypted probe to the slave daemon, reads back its
        version and KNX/EnOcean interface description, forwards the version to
        the client, and persists validation/version/interface data.
        """
        query = ''.join(["SELECT serial, secretkey FROM daemon WHERE daemon_id=", str(json_obj['data']['daemon_id'])]);
        res = self.sql.mysql_handler_personnal_query(query, db);
        if res is None or not res:
            self.logger.error('in check_slave: No daemon for id '+str(json_obj['data']['daemon_id']));
            connection.close();
            return ;
        elif len(res) > 1:
            self.logger.error('in check_slave: Too much daemons for id '+str(json_obj['data']['daemon_id']));
            connection.close();
            return ;
        hostname = res[0][0];
        self_hostname = socket.gethostname();
        # Resolve the slave's IP: ourselves -> loopback, otherwise look it up
        # in the results of the last network scan.
        if hostname == self_hostname:
            ip = '127.0.0.1';
        else:
            ip = '';
            for h in self.hostlist:
                if hostname in h._Hostname.upper():
                    ip = h._IpAddr;
        if not ip:
            self.logger.error('in check_slave: '+hostname+' not in hostlist. Try perform network scan again.');
            connection.close();
            return ;
        port = self._parser.getValueFromSection('connect', 'port');
        sock = socket.create_connection((ip, port));
        if '.' in self_hostname:
            self_hostname = self_hostname.split('.')[0];
        # Probe layout: 16-byte clear IV, then the JSON payload encrypted
        # AES-CBC with the slave's secret key, space-padded to the block size.
        aes_IV = AESManager.get_IV();
        aes_key = self.get_secret_key(hostname);
        obj_to_send = ''.join(['{"packet_type": "check_slave", "sender_name": "', self_hostname, '"}']);
        encode_obj = AES.new(aes_key, AES.MODE_CBC, aes_IV);
        spaces = 16 - len(obj_to_send) % 16;
        sock.send(bytes(aes_IV, 'utf-8') + encode_obj.encrypt(obj_to_send + (spaces * ' ')));
        # NOTE(review): the select() result is ignored, so recv() below may
        # still block beyond the timeout — confirm whether an abort was meant.
        rlist, wlist, elist = select.select([sock], [], [], SELECT_TIMEOUT * 10);
        val = '0';
        version = '';
        interface_knx = '';
        interface_enocean = '';
        data = sock.recv(4096);
        if data:
            # Reply mirrors the request: clear IV then AES-CBC payload,
            # decrypted with the secretkey fetched from the daemon table.
            decrypt_IV = data[:16].decode();
            decode_obj = AES.new(res[0][1], AES.MODE_CBC, decrypt_IV);
            data2 = decode_obj.decrypt(data[16:]).decode();
            resp = json.JSONDecoder().decode(data2);
            # The slave proves itself by echoing the expected session pass.
            if str(self.aes_slave_keys[hostname]) == str(resp['aes_pass']):
                val = '1';
                version = resp['version'];
                interface_knx = resp['interface_knx'];
                interface_enocean = resp['interface_enocean'];
            connection.send(bytes(version, 'utf-8'));
        connection.close();
        query = ''.join(['UPDATE daemon SET validation=', val, ', version="', version, '" WHERE serial="', hostname, '"']);
        self.sql.mysql_handler_personnal_query(query, db);
        query = ''.join(['UPDATE daemon_protocol SET interface="', interface_knx, '" WHERE daemon_id="', str(json_obj['data']['daemon_id']), '" AND protocol_id="1"']);
        self.sql.mysql_handler_personnal_query(query, db);
        query = ''.join(['UPDATE daemon_protocol SET interface="', interface_enocean, '" WHERE daemon_id="', str(json_obj['data']['daemon_id']), '" AND protocol_id="2"']);
        self.sql.mysql_handler_personnal_query(query, db);
        sock.close();

    def get_secret_key(self, hostname):
        """
        Retrieves the secretkey of 'hostname' in the database; returns None
        when no matching daemon row exists.
        """
        query = ''.join(["SELECT serial, secretkey FROM daemon WHERE serial = '", hostname, "'"]);
        rows = self.sql.mysql_handler_personnal_query(query);
        for serial, secret in rows:
            if serial == hostname:
                return str(secret);

    def send_mail(self, json_obj, connection, db):
        """
        Callback called each time a send_mail packet is received.
        SMTP settings come from the cached configuration (self.d3config);
        subject/recipient/body come from json_obj['data']. On failure the
        error is logged and 'Error' is sent back to the client.
        """
        try:
            from_addr = formataddr((self.d3config['6'], self.d3config['5']));
            host = self.d3config['7'];
            secure = self.d3config['8']
            port = self.d3config['9'];
            username = self.d3config['10'];
            password = self.d3config['11'];
            msg = MIMEMultipart();
            msg['Subject'] = json_obj['data']['object'];
            msg['From'] = from_addr;
            msg['To'] = json_obj['data']['destinator'];
            msg.attach(MIMEText(json_obj['data']['message']));
            server = smtplib.SMTP(host, port);
            # secure == 2 selects STARTTLS.
            if (secure == 2):
                server.ehlo();
                server.starttls();
                server.ehlo();
            # Fixed: authenticate when credentials ARE configured. The test
            # was inverted (ran only when both were empty) and passed the
            # username in the password position.
            if username and password:
                server.login(username, password);
            server.sendmail(from_addr, json_obj['data']['destinator'], msg.as_string());
            server.quit();
            connection.close();
        except Exception as e:
            self.logger.error('Error for sending mail');
            self.logger.error(e);
            connection.send(bytes('Error', 'utf-8'));
            connection.close();

    def modif_datetime(self, json_obj, connection, db):
        """
        Sets the system date and time from the two strings in json_obj['data'].
        """
        # Security fix: json_obj comes from the network; run 'date' with an
        # argument list instead of os.system() to prevent shell injection.
        call(['date', '--set', str(json_obj['data'][0])]);
        call(['date', '--set', str(json_obj['data'][1])]);

    def get_slave_name(self, json_obj, daemons):
        """
        Retrieves the hostname of the daemon described by 'json_obj' in the
        'daemons' list. Returns None when the daemon id is unknown or the
        address in json_obj is not of the KNX a/b/c form.
        """
        wanted_id = int(json_obj['data']['daemon']);
        slave_name = None;
        for daemon in daemons:
            if wanted_id == int(daemon[0]):
                slave_name = str(daemon[2]);
                break;
        if slave_name is None:
            frameinfo = getframeinfo(currentframe());
            self.logger.error('in get_slave_name: '+str(json_obj['data']['daemon']));
            return None;
        if str(json_obj['data']['addr']).count('/') != 2:
            frameinfo = getframeinfo(currentframe());
            self.logger.error('in get_slave_name: '+str(json_obj['data']['addr']));
            return None;
        return slave_name;

    def reload_web_server(self):
        """
        Call "service nginx reload" to reload the web server configuration.
        """
        self.logger.debug('Reloading web server...');
        call(["service", "nginx", "reload"]);
        self.logger.debug('[ OK ] Done reloading web server.');

    def smartcmd_launch(self, json_obj, connection, db):
        """Starts the smart-command worker identified by json_obj['data'] and hands it the client connection."""
        command = Smartcommand(self, int(json_obj['data']))
        command.setValues(connection);
        command.start();

    def triggers_list_update(self, json_obj, connection, db):
        """Reloads the trigger definitions from the database."""
        self.trigger.update_triggers_list(db);

    def schedules_list_update(self, json_obj, connection, db):
        """Reloads the schedule definitions from the database."""
        self.schedule.update_schedules_list(db);

    def scenarios_list_update(self, json_obj, connection, db):
        """Reloads the scenario definitions from the database."""
        self.scenario.update_scenarios_list(db);

    def check_schedules(self, json_obj, connection, db):
        """Evaluates every schedule, replying to the client over 'connection'."""
        self.schedule.check_all_schedules(connection);

    def launch_calc_logs(self, json_obj, connection, db):
        """Runs log aggregation; any failure is logged instead of propagated."""
        try:
            self.calcLogs.sort_logs(connection, db);
        except Exception as e:
            self.logger.error(e);

    def get_global_state(self, db):
        """
        Returns every room_device_option row whose opt_value is non-empty,
        or the empty string when no option has a value.
        """
        query = 'SELECT room_device_id, option_id, opt_value FROM room_device_option';
        rows = self.sql.mysql_handler_personnal_query(query, db);
        non_empty = [row for row in rows if row[2]];
        # Callers expect '' (not an empty list) when nothing is set.
        return non_empty if non_empty else '';

    def send_tech(self, json_obj, connection, db):
        """Enriches json_obj with the configured http/ssl ports then forwards it to the admin server."""
        http = self.sql.mysql_handler_personnal_query(
            'SELECT configuration_value FROM configuration WHERE configuration_id=1', db);
        ssl = self.sql.mysql_handler_personnal_query(
            'SELECT configuration_value FROM configuration WHERE configuration_id=2', db);
        json_obj['info']['http'] = http[0][0];
        json_obj['info']['ssl']  = ssl[0][0];
        self.send_request(json_obj, connection, db)

    def send_request(self, json_obj, connection, db):
        """Forwards json_obj to the commercial GreenLeaf admin server when that mode is enabled."""
        if self._parser.getValueFromSection('greenleaf', 'commercial') != "1":
            return
        admin_addr = self._parser.getValueFromSection('greenleaf', 'admin_addr')
        hostname = socket.gethostname()
        GLManager.SendRequest(str(json_obj), admin_addr, self.get_secret_key(hostname))

    def send_interfaces(self, json_obj, connection, db):
        """
        Pushes the KNX/EnOcean interface configuration from json_obj to the
        slave daemon identified by daemon_id, over an AES-encrypted socket,
        and replies '1' to the client when the slave acknowledges.
        """
        query = ''.join(["SELECT serial, secretkey FROM daemon WHERE daemon_id=", str(json_obj['data']['daemon_id'])]);
        res = self.sql.mysql_handler_personnal_query(query, db);
        if res is None or not res:
            self.logger.error('in send_interfaces: No daemon for id '+str(json_obj['data']['daemon_id']));
            connection.close();
            return ;
        elif len(res) > 1:
            self.logger.error('in send_interfaces: Too much daemons for id '+str(json_obj['data']['daemon_id']));
            connection.close();
            return ;
        hostname = res[0][0];
        # Resolve the slave's IP from the last network scan.
        ip = '';
        for h in self.hostlist:
            if hostname in h._Hostname.upper():
                ip = h._IpAddr;
        if not ip:
            self.logger.error('in send_interfaces: '+hostname+' not in hostlist. Try perform network scan again.');
            connection.close();
            return ;
        port = self._parser.getValueFromSection('connect', 'port');
        sock = socket.create_connection((ip, port));
        self_hostname = socket.gethostname();
        if '.' in self_hostname:
            self_hostname = self_hostname.split('.')[0];
        # Request: 16-byte clear IV then AES-CBC payload encrypted with the
        # slave's secret key, space-padded to the block size.
        aes_IV = AESManager.get_IV();
        aes_key = self.get_secret_key(hostname);
        obj_to_send = json.JSONEncoder().encode(
            {
                "packet_type": "send_interfaces",
                "sender_name": self_hostname,
                "interface_knx": json_obj['data']['interface_knx'],
                "interface_EnOcean": json_obj['data']['interface_EnOcean'],
                "interface_arg_knx": json_obj['data']['interface_arg_knx'],
                "interface_arg_EnOcean": json_obj['data']['interface_arg_EnOcean'],
                "daemon_knx": json_obj['data']['daemon_knx']
            }
        );
        encode_obj = AES.new(aes_key, AES.MODE_CBC, aes_IV);
        spaces = 16 - len(obj_to_send) % 16;
        sock.send(bytes(aes_IV, 'utf-8') + encode_obj.encrypt(obj_to_send + (spaces * ' ')));
        # NOTE(review): the select() result is ignored; recv() may block
        # beyond the timeout.
        rlist, wlist, elist = select.select([sock], [], [], SELECT_TIMEOUT * 300);
        re = '';
        data = sock.recv(4096);
        if data:
            # Reply: clear IV then AES-CBC payload, decrypted with the
            # slave's secretkey from the daemon table.
            decrypt_IV = data[:16].decode();
            host = None;
            for h in self.hostlist:
                if h._IpAddr == ip:
                    host = h;
            decode_obj = AES.new(res[0][1], AES.MODE_CBC, decrypt_IV);
            data2 = decode_obj.decrypt(data[16:]).decode();
            resp = json.JSONDecoder().decode(data2);
            hostname = host._Hostname;
            if '.' in host._Hostname:
                hostname = host._Hostname.split('.')[0];
            # '1' only when the slave echoes the expected session pass.
            if str(self.aes_slave_keys[hostname]) == str(resp['aes_pass']):
                re = '1';
            connection.send(bytes(re, 'utf-8'));
        connection.close();
        sock.close();

    def shutdown_d3(self, json_obj, connection, db):
        """
        Asks "shutdown_d3" to the slave described in json_obj for shutdown daemon.
        Fire-and-forget: the encrypted order is sent and no reply is awaited.
        """
        query = ''.join(["SELECT serial, secretkey FROM daemon WHERE daemon_id=", str(json_obj['data']['daemon_id'])]);
        res = self.sql.mysql_handler_personnal_query(query, db);
        if res is None or not res:
            self.logger.error('in shutdown_d3: No daemon for id '+str(json_obj['data']['daemon_id']));
            connection.close();
            return ;
        elif len(res) > 1:
            self.logger.error('in shutdown_d3: Too much daemons for id '+str(json_obj['data']['daemon_id']));
            connection.close();
            return ;
        hostname = res[0][0];
        # Resolve the slave's IP from the last network scan.
        ip = '';
        for h in self.hostlist:
            if hostname in h._Hostname.upper():
                ip = h._IpAddr;
        if not ip:
            self.logger.error('in shutdown_d3: '+hostname+' not in hostlist. Try perform network scan again.');
            connection.close();
            return ;
        port = self._parser.getValueFromSection('connect', 'port');
        sock = socket.create_connection((ip, port));
        self_hostname = socket.gethostname();
        if '.' in self_hostname:
            self_hostname = self_hostname.split('.')[0];
        # Order: 16-byte clear IV then AES-CBC payload encrypted with the
        # slave's secret key, space-padded to the block size.
        aes_IV = AESManager.get_IV();
        aes_key = self.get_secret_key(hostname);
        obj_to_send = ''.join(['{"packet_type": "shutdown_d3", "sender_name": "', self_hostname, '"}']);
        encode_obj = AES.new(aes_key, AES.MODE_CBC, aes_IV);
        spaces = 16 - len(obj_to_send) % 16;
        sock.send(bytes(aes_IV, 'utf-8') + encode_obj.encrypt(obj_to_send + (spaces * ' ')));
        connection.close();
        sock.close();

    def reboot_d3(self, json_obj, connection, db):
        """
        Asks "reboot_d3" to the slave described in json_obj for reboot daemon.
        Fire-and-forget: the encrypted order is sent and no reply is awaited.
        """
        query = ''.join(["SELECT serial, secretkey FROM daemon WHERE daemon_id=", str(json_obj['data']['daemon_id'])]);
        res = self.sql.mysql_handler_personnal_query(query, db);
        if res is None or not res:
            self.logger.error('in reboot_d3: No daemon for id '+str(json_obj['data']['daemon_id']));
            connection.close();
            return ;
        elif len(res) > 1:
            self.logger.error('in reboot_d3: Too much daemons for id '+str(json_obj['data']['daemon_id']));
            connection.close();
            return ;
        hostname = res[0][0];
        # Resolve the slave's IP from the last network scan.
        ip = '';
        for h in self.hostlist:
            if hostname in h._Hostname.upper():
                ip = h._IpAddr;
        if not ip:
            self.logger.error('in reboot_d3: '+hostname+' not in hostlist. Try perform network scan again.');
            connection.close();
            return ;
        port = self._parser.getValueFromSection('connect', 'port');
        sock = socket.create_connection((ip, port));
        self_hostname = socket.gethostname();
        if '.' in self_hostname:
            self_hostname = self_hostname.split('.')[0];
        # Order: 16-byte clear IV then AES-CBC payload encrypted with the
        # slave's secret key, space-padded to the block size.
        aes_IV = AESManager.get_IV();
        aes_key = self.get_secret_key(hostname);
        obj_to_send = ''.join(['{"packet_type": "reboot_d3", "sender_name": "', self_hostname, '"}']);
        encode_obj = AES.new(aes_key, AES.MODE_CBC, aes_IV);
        spaces = 16 - len(obj_to_send) % 16;
        sock.send(bytes(aes_IV, 'utf-8') + encode_obj.encrypt(obj_to_send + (spaces * ' ')));
        connection.close();
        sock.close();

    def wifi_update(self, json_obj, connection, db):
        """
        Send "wifi_update" to the slave described in json_obj for update the
        wifi configuration (ssid/password/security/mode), then replies '1' to
        the client when the slave acknowledges.
        """
        query = ''.join(["SELECT serial, secretkey FROM daemon WHERE daemon_id=", str(json_obj['data']['daemon_id'])]);
        res = self.sql.mysql_handler_personnal_query(query, db);
        if res is None or not res:
            self.logger.error('in wifi_update: No daemon for id '+str(json_obj['data']['daemon_id']));
            connection.close();
            return ;
        elif len(res) > 1:
            self.logger.error('in wifi_update: Too much daemons for id '+str(json_obj['data']['daemon_id']));
            connection.close();
            return ;
        hostname = res[0][0];
        # Resolve the slave's IP from the last network scan.
        ip = '';
        for h in self.hostlist:
            if hostname in h._Hostname.upper():
                ip = h._IpAddr;
        if not ip:
            self.logger.error('in wifi_update: '+hostname+' not in hostlist. Try perform network scan again.');
            connection.close();
            return ;
        port = self._parser.getValueFromSection('connect', 'port');
        sock = socket.create_connection((ip, port));
        self_hostname = socket.gethostname();
        if '.' in self_hostname:
            self_hostname = self_hostname.split('.')[0];
        aes_IV = AESManager.get_IV();
        aes_key = self.get_secret_key(hostname);
        # Fixed: the password field was the literal string "******" (a
        # redaction artifact), so the slave received a masked password.
        # Restored from json_obj like the ssid/security/mode fields.
        obj_to_send = ''.join(['{"packet_type": "wifi_update", "sender_name": "', str(self_hostname),
              '", "ssid": "', str(json_obj['data']['ssid']),
              '", "password": "', str(json_obj['data']['password']),
              '", "security": "', str(json_obj['data']['security']),
              '", "mode": "', str(json_obj['data']['mode']), '"}']);
        encode_obj = AES.new(aes_key, AES.MODE_CBC, aes_IV);
        spaces = 16 - len(obj_to_send) % 16;
        sock.send(bytes(aes_IV, 'utf-8') + encode_obj.encrypt(obj_to_send + (spaces * ' ')));
        rlist, wlist, elist = select.select([sock], [], [], SELECT_TIMEOUT * 300);
        re = '';
        for s in rlist:
            data = sock.recv(4096);
            if not data:
                continue;
            # Reply: clear IV then AES-CBC payload, decrypted with the
            # slave's secretkey from the daemon table.
            decrypt_IV = data[:16].decode();
            host = None;
            for h in self.hostlist:
                if h._IpAddr == ip:
                    host = h;
            decode_obj = AES.new(res[0][1], AES.MODE_CBC, decrypt_IV);
            data2 = decode_obj.decrypt(data[16:]).decode();
            resp = json.JSONDecoder().decode(data2);
            hostname = host._Hostname;
            if '.' in host._Hostname:
                hostname = host._Hostname.split('.')[0];
            # '1' only when the slave echoes the expected session pass.
            if str(self.aes_slave_keys[hostname]) == str(resp['aes_pass']):
                re = '1';
            connection.send(bytes(re, 'utf-8'));
        connection.close();
        sock.close();
    
    def remote_sql(self, json_obj, connection):
        """
        Execute sql command from configurator.

        Splits json_obj['data'] on ';' and runs each non-empty statement.
        SECURITY NOTE(review): this executes arbitrary SQL received over the
        network — access to this packet type must be restricted upstream.
        """
        db = MasterSql();
        req = json_obj['data'].split(';');
        for item in req:
            if item != '':
                db.mysql_handler_personnal_query(item);
        connection.close();
        return;
Example #22
0
                            num=45).astype(int)

# The number of possible budgets that can be allocated to each subcampaign
number_of_budgets = 11

time_horizon = 300

regret = []
for e in range(0, number_of_experiments):

    print('\n')
    print('Starting experiment', e + 1)
    logging.info("\n" + "Starting experiment " + str(e + 1))

    advertising_scenarios = [
        Scenario(daily_budgets=daily_budgets, campaign=0, var=0),
        Scenario(daily_budgets=daily_budgets, campaign=1, var=0),
        Scenario(daily_budgets=daily_budgets, campaign=2, var=0)
    ]

    advertising_learners = [
        SW_GPTS_Learner(arms=daily_budgets[:number_of_budgets]),
        SW_GPTS_Learner(arms=daily_budgets[:number_of_budgets]),
        SW_GPTS_Learner(arms=daily_budgets[:number_of_budgets])
    ]

    pricing_scenarios = [
        CustomizablePricingScenario(sex=1, under_30=1),
        CustomizablePricingScenario(sex=1, under_30=0),
        CustomizablePricingScenario(sex=0)
    ]
Example #23
0
daily_budgets = np.linspace(5000, budget_cap, endpoint=True,
                            num=45).astype(int)

# The number of possible budgets that can be allocated to each subcampaign
number_of_budgets = 11

time_horizon = 100

regret = []
for e in range(0, number_of_experiments):

    print('\n')
    print('Starting experiment', e + 1)

    scenarios = [
        Scenario(daily_budgets=daily_budgets, campaign=0),
        Scenario(daily_budgets=daily_budgets, campaign=1),
        Scenario(daily_budgets=daily_budgets, campaign=2)
    ]

    subcampaigns = [
        SW_GPTS_Learner(arms=daily_budgets[:number_of_budgets]),
        SW_GPTS_Learner(arms=daily_budgets[:number_of_budgets]),
        SW_GPTS_Learner(arms=daily_budgets[:number_of_budgets])
    ]

    optimizer = Optimizer(daily_budgets, len(daily_budgets))
    ideal_optimizer = Optimizer(daily_budgets, len(daily_budgets))

    # The optimal and the ideal result obtained by playing the best possible combination of arms known
    optimal_rewards_per_round = np.zeros(time_horizon)
def main(cm_file, cdis_file, output_dir):
    #------------------------------ BEGIN LOADING ------------------------------#

    print(">>>: Carregando cenário")
    scenario = Scenario.Scenario()
    scenario.read(cm_file, cdis_file)

    scenario_name = os.path.splitext(os.path.basename(cm_file))[0]
    cdis_info_name = os.path.splitext(os.path.basename(cdis_file))[0]

    scenario_path = os.path.join(output_dir, scenario_name + cdis_info_name)

    result_log_path = os.path.join(scenario_path, "result_log.csv")
    result_fig_path = os.path.join(scenario_path, "figures")
    result_res_path = os.path.join(scenario_path, "results")

    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    if not os.path.exists(scenario_path):
        os.makedirs(scenario_path)
    else:
        shutil.rmtree(scenario_path)
        os.makedirs(scenario_path)

    if not os.path.exists(result_fig_path):
        os.makedirs(result_fig_path)

    if not os.path.exists(result_res_path):
        os.makedirs(result_res_path)

    Visualization.visualize(scenario, scenario.cm.ceList, result_fig_path,
                            "00")

    #------------------------------ END LOADING ------------------------------#

    with open(result_log_path, "w+") as resultLogFile:
        resultLogFile.write(
            "Data_e_Hora;Nome_do_cenário;Caminho_do_arquivo_do_cenário;Nome_das_informações_do_CDIS;Caminho_do_arquivo_das_informações_do_cdis;Tempo_de_execução\n"
        )
        resultLogFile.write(datetime.now().strftime("%d/%m/%Y %H:%M:%S") +
                            ";" + scenario_name + ";" + cm_file + ";" +
                            cdis_info_name + ";" + cdis_file)

        startTime = time.time()
        totalIteration = len(scenario.cdisList)

        for iteration in range(0, totalIteration):
            print("\n>>>: Resolvendo cenário com informações do CDIS=" +
                  str(iteration) + "/" + str(totalIteration))
            scenario.updateChannels(scenario.cdisList[iteration].channelList)

            #------------------------------ BEGIN PREPROCESSING ------------------------------#

            print(">>>: Pré-processando entrada")
            (allCEVarList, ceVarList, ceByChannelVarList,
             interferenceList) = PreProcessing.process(scenario)

            #------------------------------ END PREPROCESSING ------------------------------#

            #------------------------------ BEGIN SOLVER ------------------------------#

            print(">>>: Criando modelo")
            model = gb.Model('cognitive-networks')

            print(">>>: Adicionando variáveis ao modelo")
            allCEModelVarList = []

            for ceVar in allCEVarList:
                allCEModelVarList.append(
                    model.addVar(name=ceVar.name, vtype=gb.GRB.BINARY))

            ceByChannelModelVarList = []

            for ceByChannelVar in ceByChannelVarList:
                ceByChannelModelVarList.append(
                    model.addVar(name=ceByChannelVar.name))

            model.update()

            ceModelVarList = []

            for ceVars in ceVarList:
                modelVarList = []

                for ceVar in ceVars:
                    modelVarList.append(model.getVarByName(ceVar.name))

                ceModelVarList.append(modelVarList)

            print(">>>: Adicionando restrições ao modelo")
            ceId = 0

            for ceModelVars in ceModelVarList:
                model.addConstr(gb.quicksum(ceModelVars), gb.GRB.EQUAL, 1,
                                "Única_configuração_para_CE_" + str(ceId))

                ceId += 1

            interferenceModelVarList = []

            for interference in interferenceList:
                ceVar = interference[0]
                ceTotalInterference = interference[1]
                ceInterferenceList = interference[2]

                if (ceTotalInterference > 0):
                    interferenceModelVar = model.addVar(
                        name="Interferência-devido-" + ceVar.name)
                    interferenceModelVarList.append(interferenceModelVar)
                    model.update()

                    ceInterferenceModelVarList = []

                    for ceInterference in ceInterferenceList:
                        ceInterferenceModelVarList.append(
                            ceInterference * model.getVarByName(ceVar.name))

                    model.addConstr(
                        gb.quicksum(ceInterferenceModelVarList), gb.GRB.EQUAL,
                        interferenceModelVar,
                        "Interferência_provocada_por_" + ceVar.name)

                    model.addConstr(
                        interferenceModelVar, gb.GRB.LESS_EQUAL,
                        args.max_interference,
                        "Máximo_de_interferência_tolerada_de_" + ceVar.name)

            for ceByChannelModelVar in ceByChannelModelVarList:
                ceByChannelVarNameSplit = ceByChannelModelVar.varName.split(
                    '_')

                channel = int(ceByChannelVarNameSplit[3])

                filtredCEModelVarList = PreProcessing.filterCEByChannelModelVar(
                    allCEModelVarList, channel)

                model.addConstr(gb.quicksum(filtredCEModelVarList),
                                gb.GRB.EQUAL, ceByChannelModelVar,
                                "Qtd_de_CE_no_canal_" + str(channel))

                model.addConstr(
                    ceByChannelModelVar, gb.GRB.LESS_EQUAL,
                    min(len(scenario.cm.ceList),
                        ((len(scenario.cm.ceList) /
                          PreProcessing.countAvailableChannels(scenario)) +
                         1)), "Máximo_de_CEs_no_canal_" + str(channel))

            ceId = 0

            for ceModelVars in ceModelVarList:
                potencyList = []

                for ceModelVar in ceModelVars:
                    ceModelVarNameSplit = ceModelVar.varName.split("_")
                    cePotency = int(ceModelVarNameSplit[3])
                    potencyList.append(cePotency * ceModelVar)

                model.addConstr(
                    gb.quicksum(potencyList), gb.GRB.GREATER_EQUAL,
                    args.min_potency,
                    "Mínimo_de_potência_para_máxima_cobertura_do_CE_" +
                    str(ceId))

                ceId += 1

            print(">>>: Definindo a função objetivo")
            model.setObjective(gb.quicksum(interferenceModelVarList),
                               gb.GRB.MINIMIZE)

            model.write(
                os.path.join(result_res_path,
                             "model_it_" + str(iteration) + ".lp"))
            print(">>>: Modelo salvo")

            print(">>>: Otimizando modelo")
            model.optimize()

            resultCEVarList = []

            with open(
                    os.path.join(result_res_path,
                                 "it_" + str(iteration) + ".txt"),
                    "w") as resultFile:
                if (model.status == gb.GRB.Status.OPTIMAL):
                    resultFile.write(">>>: Resultado ótimo:\n")
                    print(">>>: Resultado ótimo:")

                    for ceModelVar in allCEModelVarList:
                        if (ceModelVar.x == 1.0):
                            resultCEVarList.append(ceModelVar.varName)
                            resultFile.write("%s\n" % ceModelVar.varName)
                            print("%s" % ceModelVar.varName)

                    for interferenceModelVar in interferenceModelVarList:
                        ceModelVar = model.getVarByName(
                            interferenceModelVar.varName.split("-")[2])

                        if ((ceModelVar.x == 1.0)
                                and (interferenceModelVar.x > 0.0)):
                            resultFile.write("%s %s\n" %
                                             (interferenceModelVar.varName,
                                              interferenceModelVar.x))
                            print("%s %s" % (interferenceModelVar.varName,
                                             interferenceModelVar.x))

                    for ceByChannelModelVar in ceByChannelModelVarList:
                        resultFile.write("%s %s\n" %
                                         (ceByChannelModelVar.varName,
                                          ceByChannelModelVar.x))
                        print("%s %s" % (ceByChannelModelVar.varName,
                                         ceByChannelModelVar.x))
                elif (model.status == gb.GRB.Status.INFEASIBLE):
                    resultFile.write(">>>: O modelo é inviável!\n")
                    print(">>>: O modelo é inviável!")

                    print(">>>: Computando IIS")
                    model.computeIIS()

                    resultFile.write(
                        "\n>>>: As restrições a seguir não foram satisfeitas:\n"
                    )
                    print(">>>: As restrições a seguir não foram satisfeitas:")
                    for c in model.getConstrs():
                        if c.IISConstr:
                            resultFile.write("%s\n" % c.constrName)
                            print("%s" % c.constrName)

                    print(">>>: Otimizando modelo relaxado")
                    model.feasRelaxS(0, False, False, True)
                    model.optimize()

                    if (model.status == gb.GRB.Status.OPTIMAL):
                        resultFile.write(
                            "\n>>>: Resultado ótimo do modelo relaxado:\n")
                        print(">>>: Resultado ótimo do modelo relaxado:")

                        for ceModelVar in allCEModelVarList:
                            if (ceModelVar.x == 1.0):
                                resultCEVarList.append(ceModelVar.varName)
                                resultFile.write("%s\n" % ceModelVar.varName)
                                print("%s" % ceModelVar.varName)

                        for interferenceModelVar in interferenceModelVarList:
                            ceModelVar = model.getVarByName(
                                interferenceModelVar.varName.split("-")[2])

                            if ((ceModelVar.x == 1.0)
                                    and (interferenceModelVar.x > 0.0)):
                                resultFile.write("%s %s\n" %
                                                 (interferenceModelVar.varName,
                                                  interferenceModelVar.x))
                                print("%s %s" % (interferenceModelVar.varName,
                                                 interferenceModelVar.x))

                        for ceByChannelModelVar in ceByChannelModelVarList:
                            resultFile.write("%s %s\n" %
                                             (ceByChannelModelVar.varName,
                                              ceByChannelModelVar.x))
                            print("%s %s" % (ceByChannelModelVar.varName,
                                             ceByChannelModelVar.x))
                    elif (model.status in (gb.GRB.Status.INF_OR_UNBD,
                                           gb.GRB.Status.UNBOUNDED,
                                           gb.GRB.Status.INFEASIBLE)):
                        print(
                            ">>>: O modelo relaxado não pode ser resolvido porque é ilimitado ou inviável"
                        )
                    else:
                        resultFile.write(
                            ">>>: A otimização parou com status: %d\n" %
                            model.status)
                        print(">>>: A otimização parou com status: %d" %
                              model.status)
                elif (model.status == gb.GRB.Status.UNBOUNDED):
                    resultFile.write(
                        ">>>: O modelo não pode ser resolvido porque é ilimitado\n"
                    )
                    print(
                        ">>>: O modelo não pode ser resolvido porque é ilimitado"
                    )
                else:
                    resultFile.write(
                        ">>>: A otimização parou com status: %d\n" %
                        model.status)
                    print(">>>: A otimização parou com status: %d" %
                          model.status)

            resultCEList = []

            for resultCEVar in resultCEVarList:
                ceVarNameSplit = resultCEVar.split('_')

                ceId = int(ceVarNameSplit[1])
                resultCEChannelNumber = int(ceVarNameSplit[2])
                resultCEPotency = int(ceVarNameSplit[3])

                ce = scenario.cm.ceList[ceId]

                resultCEList.append(
                    DataStructures.CE(ceId, ce.antenna, resultCEChannelNumber,
                                      ce.geoPoint, resultCEPotency,
                                      ce.maxPotency, ce.clientList))

            #------------------------------ END SOLVER ------------------------------#

            #------------------------------ BEGIN VISUALIZATION ------------------------------#

            if (len(resultCEVarList) > 0):
                Visualization.visualize(scenario, resultCEList,
                                        result_fig_path, str(iteration))

            #------------------------------ END VISUALIZATION ------------------------------#

        resultLogFile.write(";" + str((time.time() - startTime)))
Example #25
0
from Scenario import *
from Unite import *
import time
# Hexagonal tiles making up the board; filled in further below.
hexagones = []

# Terrain types: (id, name, RGBA colour with 0.50 alpha).
# NOTE(review): id 4 appears twice ("AccesSPUR" and "Rise") and id 6 is
# missing -- looks like a typo, confirm against the Topologie definition.
listetopo= [Topologie(1,"Clear",(136,66,29,0.50)),
 Topologie(2,"Ridge",(91,60,17,0.50)),
 Topologie(3,"Canal",(20,147,20,0.50)),
 Topologie(4,"AccesSPUR",(22,184,78,0.50)),
 Topologie(5,"Vapor pool",(255,255,255,0.50)),
 Topologie(4,"Rise",(146,109,39,0.50)),
 Topologie(7,"Cliffside",(255,206,154,0.50)),
 Topologie(8,"Mesa-Top",(240,195,0,0.50)),
 Topologie(9,"Crater",(0,0,0,0.50))]

# NOTE(review): this rebinding shadows the Scenario class imported above,
# so any later Scenario(...) construction would fail -- consider renaming
# the instance.
Scenario=Scenario(DISPLAY)
UniteLegionnaire= [Legionnaire_Trouper(1)]
def evenr_offset_to_pixel(direction):
    """Convert offset hex coordinates (direction.q, direction.r) to a pixel Point.

    Rows with an odd index are shifted half a hex along x.  Relies on the
    module-level hex ``size`` and the ``Point`` type.
    """
    col = direction.q
    row = direction.r
    half_shift = 0.5 * (row & 1)
    px = size * sqrt(3) * (col - half_shift)
    py = size * 3 / 2 * row
    return Point(px, py)

# Build a 12x14 board of hexes, each with a randomly chosen terrain type.
for q in range(0,12):
    for r in range(0,14):
        hexagones.append(Hexagone(evenr_offset_to_pixel(Direction(q,r)),listetopo[random.randint(0,8)]))

grid = Grid(hexagones)
grid.display()

# Main event loop (body truncated in this excerpt).
while True:
        for event in pygame.event.get():
    def exportSeries(scenario, ID):
        """Export the aggregate and dynamic series for *scenario* to CSV.

        Writes ``Aggregates.csv`` (input series scaled by Dynamic/Static
        deltas and a microsim fix-up factor) and ``Dynamics.csv`` (dynamic,
        market, and STATIC_-prefixed static series with steady state
        prepended) into the output directory associated with *ID*.

        Args:
            scenario: model scenario object (project type) providing policy
                predicates, cache locations and transition-year bounds.
            ID: identifier passed to ``OutputWriter.getOutputDir``.

        Returns:
            int: 1 on success, 0 when the "Dynamic baseline" inputs cannot
            be read (the export is skipped).
        """
        success = 0
        outputDir = OutputWriter.getOutputDir(ID)
        # BUGFIX: was `os.path.isifile('dir')` -- a nonexistent function
        # called on a literal; the intent is to create the directory on the
        # first export and warn when overwriting an existing one.
        if not os.path.isdir(outputDir):
            os.mkdir(outputDir)
        else:
            print('WARNING! OutputWriter.exportSeries() is overwriting scenario ID %s\n' % int(ID))

        # Load dynamic and static variables
        cacheDir = PathFinder.getCacheDir(scenario)
        with open(os.path.join(cacheDir, 'dynamics.pkl'), 'rb') as handle:
            Dynamic = pickle.load(handle)
        with open(os.path.join(cacheDir, 'market.pkl'), 'rb') as handle:
            Market = pickle.load(handle)
        if scenario.isCurrentPolicy() or scenario.postShock().isCurrentPolicy():
            # Under current policy the static baseline IS the dynamic run.
            # BUGFIX: the MATLAB original copied by value; in Python we must
            # copy explicitly so the STATIC_ column renaming below does not
            # clobber Dynamic/Market.
            Static = dict(Dynamic)
            StaticMarket = dict(Market)
        else:
            with open(os.path.join(cacheDir, 'statics.pkl'), 'rb') as handle:
                Static = pickle.load(handle)
            with open(os.path.join(PathFinder.getCacheDir(scenario.currentPolicy()), 'market.pkl'), 'rb') as handle:
                StaticMarket = pickle.load(handle)

        # TEMP TEMP -- This is until Scenario.OpennessPath is not a
        # dependence in ModelSolver.
        #     NOTE: This is not quite right for any openness <> baseline.
        #           Since the delta should be of constant 'baseline'
        #           behavior while the openness changes.
        if scenario.OpennessPath != 'baseline':
            print('WARNING! Changing policy delta -- Scenario.Openness=%s  --> baseline.\n' % scenario.OpennessPath)
            params = scenario.getParams()
            params.OpennessPath = 'baseline'
            bscenario = Scenario(params)
            if bscenario.isCurrentPolicy() or bscenario.postShock().isCurrentPolicy():
                dynamicsFile = 'dynamics.pkl'
            else:
                dynamicsFile = 'statics.pkl'

            with open(os.path.join(PathFinder.getCacheDir(bscenario), dynamicsFile), 'rb') as handle:
                Static = pickle.load(handle)
            with open(os.path.join(PathFinder.getCacheDir(bscenario.currentPolicy()), 'market.pkl'), 'rb') as handle:
                StaticMarket = pickle.load(handle)

        # END TEMP

        # Load Dynamics for Microsim baseline add factor fix-up
        # NOTE: Fix-up factor is going to be Dynamic_base/Dynamic_open_base
        try:
            with open(os.path.join(PathFinder.getCacheDir(scenario.currentPolicy().open()), 'dynamics.pkl'), 'rb') as handle:
                Dynamic_open_base = pickle.load(handle)
            # BUGFIX: was opened without 'rb' -- pickle requires binary mode.
            with open(os.path.join(PathFinder.getCacheDir(scenario.baseline()), 'dynamics.pkl'), 'rb') as handle:
                Dynamic_base = pickle.load(handle)
        except (OSError, pickle.PickleError):
            # BUGFIX: the original raised and then had an unreachable
            # `return success`; the "Skipping..." message shows this is a
            # best-effort skip, not a fatal error.
            print('WARNING! Cannot read files to make "Dynamic baseline". Skipping...\n')
            return success

        ## Write AGGREGATES.csv

        ##
        # Build the source series.
        #   For static series, read from input interfaces
        firstYear = scenario.TransitionFirstYear
        lastYear = scenario.TransitionLastYear - 1
        numYears = lastYear - firstYear + 1

        pathFinder = PathFinder(scenario)
        source_series = {}

        projections_file = pathFinder.getProjectionsInputPath('Projections')
        source_series['projections'] = InputReader.read_series(projections_file, 'Year', firstYear, lastYear)

        taxcalculator_file = pathFinder.getTaxCalculatorInputPath('Aggregates')
        source_series['taxcalculator'] = InputReader.read_series(taxcalculator_file, 'Year', firstYear, lastYear)

        oasicalculator_file = pathFinder.getOASIcalculatorInputPath('aggregates')
        source_series['oasicalculator'] = InputReader.read_series(oasicalculator_file, 'Year', firstYear, lastYear)

        source_series['Market'] = Market

        # Add GDP deflator changes series (year-over-year inflation factors)
        p_series = InputReader.read_series(projections_file, 'Year', firstYear - 1, lastYear)
        gdp_deflator = p_series['GDPDeflator']
        # BUGFIX: was np.ones((1, numYears)) -- a 2-D row vector whose
        # elements cannot be filled with inflation_rate[i]; a flat 1-D
        # vector is intended.
        inflation_rate = np.ones(numYears)
        for i in range(numYears):
            inflation_rate[i] = gdp_deflator[i + 1] / gdp_deflator[i]

        # Construct dynamic scaling series

        # Helper: Source1/Source2 ratio for one series, with the special
        # pseudo-variable names handled explicitly.
        # BUGFIX: the original decorated this *nested* closure-using
        # function with @staticmethod (meaningless outside a class body and
        # non-callable here before Python 3.10), and for '_nonindexed' it
        # tried to divide two Python lists -- concatenation was intended.
        def makeDeltaSeries(Source1, Source2, var_name):
            if var_name == '_add_inflation_to_interestrate':
                delta = inflation_rate
            elif var_name == '_asis':
                delta = np.ones(numYears)
            elif var_name == '_nonindexed':
                series1 = np.concatenate((np.ones(10), Source1['outs'][10:]))
                series2 = np.concatenate((np.ones(10), Source2['outs'][10:]))
                delta = series1 / series2
                delta = delta[0:numYears]  # truncate in case too long
            else:
                delta = Source1[var_name] / Source2[var_name]
            return delta

        dynamic_series = {}

        # Iterate over series names
        for series_name, spec in OutputWriter.series_names.items():
            var_name = spec['var_name']
            source = spec['source']
            source_name = spec['source_name']
            if len(source_name) == 0:
                source_name = series_name

            # Calculate Dynamic/Static delta
            delta = makeDeltaSeries(Dynamic, Static, var_name)

            # Calculate fixup factor for microsim (as delta open_baseline/baseline)
            fixup = makeDeltaSeries(Dynamic_base, Dynamic_open_base, var_name)

            # Calculate scaling series
            v_scale = delta * fixup   # Adjust by fix-up factor
            v_scale[np.isnan(v_scale)] = 1

            # Apply to source series
            # BUGFIX: was indexed with the literal string 'source_name'
            # instead of the source_name variable computed above.
            v_source = np.asarray(source_series[source][source_name])
            # BUGFIX: was np.size(v_source, 2) -- a MATLAB 1-based dim index
            # that raises in numpy; flatten any row/column vector to 1-D so
            # it broadcasts element-wise with the 1-D scaling series.
            if v_source.ndim > 1:
                v_source = v_source.reshape(-1)   # frickin' matlab and its vector direction

            if var_name == '_add_inflation_to_interestrate':
                dynamic_series[series_name] = ((1 + v_source) * delta) - 1
            else:
                dynamic_series[series_name] = v_source * v_scale

        # Write series to file
        series_table = pd.DataFrame(dynamic_series)

        series_table.to_csv(os.path.join(outputDir, 'Aggregates.csv'))

        ## Write DYNAMICS.CSV

        Dynamic['outvars'] = OutputWriter.dynamic_outvars
        Market['outvars'] = OutputWriter.market_outvars
        # BUGFIX: was attribute access (Dynamic.outvars) on dict-style
        # objects; also copy so the STATIC_ renaming below does not mutate
        # the dicts shared with Dynamic/Market.
        Static['outvars'] = dict(Dynamic['outvars'])
        StaticMarket['outvars'] = dict(Market['outvars'])

        # Create new column names for Static
        for o in Static['outvars'].keys():
            p = Static['outvars'][o]
            Static['outvars'][o] = 'STATIC_' + p

        for o in StaticMarket['outvars'].keys():
            p = StaticMarket['outvars'][o]
            StaticMarket['outvars'][o] = 'STATIC_' + p

        # Load steady state variables
        with open(os.path.join(PathFinder.getCacheDir(scenario.currentPolicy().steady()), 'dynamics.pkl'), 'rb') as handle:
            Dynamic_steady = pickle.load(handle)
        with open(os.path.join(PathFinder.getCacheDir(scenario.currentPolicy().steady()), 'market.pkl'), 'rb') as handle:
            Market_steady = pickle.load(handle)

        # Append steady state variables and reset the first year
        firstYear = firstYear - 1
        for o in Dynamic['outvars'].keys():
            Dynamic[o] = np.hstack((Dynamic_steady[o], Dynamic[o]))
            Static[o] = np.hstack((Dynamic_steady[o], Static[o]))

        for o in Market['outvars'].keys():
            Market[o] = np.hstack((Market_steady[o], Market[o]))
            StaticMarket[o] = np.hstack((Market_steady[o], StaticMarket[o]))

        # Concatenate structs: map each variable to its output column name.
        output_series = {}

        for M in [Dynamic, Market, Static, StaticMarket]:
            for o in M['outvars'].keys():
                p = M['outvars'][o]
                output_series[p] = M[o]

        # Write series to file
        series_table = pd.DataFrame(output_series)
        series_table.to_csv(os.path.join(outputDir, 'Dynamics.csv'))

        success = 1

        return success