Example #1
def Run(source: str, debug: bool):

    try:
        prog = Program(source)

        preproc = Preprocessor()
        preproc.preprocess(prog)

        # for l in prog.preprocessed:
        #    print(l)

        assembler = Assembler()
        assembler.assemble(prog)

        # for l in prog.labels:
        #     print(l, f" Position: {l.position}")

        # for i in prog.instructions:
        #     print(i, f" Position: {i.position}  Label: {i.labelName}")
        #     for p in i.parameters:
        #         print(" ", p, end = "")
        #         if p.labelName != None:
        #             print(f"   {p.labelName}")
        #         else:
        #             print("")

        # for b in prog.binary:
        #     print("%04X " % b, end = "")

        # print("")

        computer = Computer()
        computer.loadProgram(prog)

        if debug == False:
            computer.run()

            for l in prog.labels:
                if l.size > 0:
                    print("%13s (%6s[%3d]): " % (l.name, l.datatype, l.size), end ="")
                    for i in range(l.position, l.position + l.size):
                        print("%d " % computer.memory[i], end = "")
                    print("")

        else:
            debugger = Debugger(computer, prog)
            debugger.run()

    except PreprocessorError as e:
        print(e)

    except AssemblerError as e:
        print(e)

    except CompilerError as e:
        print(e)

    except Exception as e:
        raise e
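A minimal usage sketch for the Run pipeline above, assuming the Program, Preprocessor, Assembler, Computer, and Debugger classes shown are importable; the file name "example.asm" is illustrative only:

if __name__ == "__main__":
    # read an assembly source file and run it without the interactive debugger
    with open("example.asm") as f:
        Run(f.read(), debug=False)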
Example #2
 def __init__(self, verbose):
     self.debugger = Debugger(verbose)
     try:
         urllib.request.urlopen('http://google.com')
     except:
         self.debugger.rise_Error(
             "Internet Connection Cannot Be Established")
         exit(1)
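The check above swallows every exception with a bare except. A hedged variant of the same connectivity probe, with a timeout and a narrower exception clause (the probe URL is illustrative, not taken from the example):

import urllib.request
import urllib.error

def have_internet(url='http://google.com', timeout=5):
    """Return True if the probe URL can be reached within the timeout."""
    try:
        urllib.request.urlopen(url, timeout=timeout)
        return True
    except urllib.error.URLError:
        return False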
Example #3
def excepthook(etype, value, tb):
    if etype is bdb.BdbQuit:
        return
        
    defaultTB(etype, value, tb)

    if tb and not sys.stdout.closed and \
            hasattr(sys.stdout, "isatty") and \
            sys.stdout.isatty() and \
            etype.__name__ != "DistributionNotFound":
        Debugger.post_mortem(tb)
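A hook like this only takes effect once it is installed as sys.excepthook; a hedged sketch of the installation pattern (here defaultTB stands in for the previously saved default handler that the hook above delegates to):

import sys

defaultTB = sys.excepthook   # keep the original handler so excepthook() can delegate to it
sys.excepthook = excepthook  # route uncaught exceptions through the custom hook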
Example #4
def run():
    m = MyModel()

    d = Debugger()
    d.debug(m)

    pyModelBuilder = PythonModelBuilder()
    pyModelBuilder.build(m, 
                         persistanceModule=m.pythonDataModule,
                         comboBoxModule=m.comboBoxModule,
                         itemModelModule=m.itemModelModule,
                         treeViewModule=m.treeViewModule)
Example #5
class MatrixDownloader:
    def __init__(self, verbose):
        self.debugger = Debugger(verbose)
        try:
            urllib.request.urlopen('http://google.com')
        except:
            self.debugger.rise_Error(
                "Internet Connection Cannot Be Established")
            exit(1)

    def getCSRMatrix(self, matrix_id, diagonal):
        self.debugger.debug("Downloading Matrix with ID: " + str(matrix_id))
        #MM_path = str(subprocess.run(['ssget', '-e', '-i', str(matrix_id)], stdout=subprocess.PIPE,
        #                  stderr=subprocess.STDOUT)).split("'")[-2][:-2]
        result = subprocess.run(
            ['ssget', '-e', '-i', str(matrix_id)], stdout=subprocess.PIPE)
        MM_path = result.stdout.decode().strip()
        self.debugger.debug("Matrix Path is " + str(MM_path))
        self.debugger.debug("Matrix Downloaded")
        Matrix_CSR = scipy.io.mmread(MM_path).tocsr()
        if diagonal == "lower":
            return tril(Matrix_CSR, k=0, format="csr")
        if diagonal == "upper":
            return triu(Matrix_CSR, k=0, format="csr")
        else:
            return Matrix_CSR
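getCSRMatrix relies on scipy.sparse.tril/triu to keep only one triangle of the matrix. A small self-contained sketch of that step (the 3x3 matrix is illustrative):

import numpy as np
import scipy.sparse

A = scipy.sparse.csr_matrix(np.arange(1, 10).reshape(3, 3))
lower = scipy.sparse.tril(A, k=0, format="csr")  # keep the main diagonal and below
upper = scipy.sparse.triu(A, k=0, format="csr")  # keep the main diagonal and above
print(lower.toarray())
print(upper.toarray())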
Example #6
    def getTimeLine(self, cIndex, cMotion):
        # Initialize the values

        self.initialTimeLine(cIndex, cMotion)

        # After Init we detect turning
        #self.detectTurning(self.timeLineDirection)

        #Debugger.showList(cMotion, "cMotion")
        #Debugger.showList(cIndex, "CIndex")

        # A function to add the end Time
        self.addEndTime(cIndex, cMotion)

        Debugger.showList(self.timeLineName, "TimeLineName")
        # Debugger.showList(self.timeLineTotalFrame, "TimeLineTotalFrame")
        Debugger.showList(self.timeLineEachFrame, "TimeLineEachFrame")
        Debugger.showList(self.timeLineCoordinate, "TimeLineCoordinate")
        Debugger.showList(self.timeLineDirection, "TimeLineDirection")
        # Adjust Direction

        # set the Final Timeline
        #self.finalTimeLine()

        solvedTimeLine = [
            self.timeLineName, self.timeLineCoordinate, self.timeLineDirection,
            self.timeLineEachFrame
        ]

        return solvedTimeLine
Example #7
    def __init__(self, settings, dataset):
        self.settings = settings
        self.dataset = dataset
        self.dataPreprocesser = dataset.dataPreprocesser
        self.debugger = Debugger.Debugger(settings)

        self.use_sigmoid_or_softmax = 'softmax'
        assert self.use_sigmoid_or_softmax == 'softmax'

        #BACKBONE = 'resnet34'
        #BACKBONE = 'resnet50' #batch 16
        #BACKBONE = 'resnet101' #batch 8
        BACKBONE = settings.model_backend
        custom_weights_file = "imagenet"

        #resolution_of_input = self.dataset.datasetInstance.IMAGE_RESOLUTION
        resolution_of_input = None
        self.model = self.create_model(backbone=BACKBONE,
                                       custom_weights_file=custom_weights_file,
                                       input_size=resolution_of_input,
                                       channels=3)
        self.model.summary()

        self.local_setting_batch_size = settings.train_batch  #8 #32
        self.local_setting_epochs = settings.train_epochs  #100

        self.train_data_augmentation = True

        # saving paths for plots ...
        self.save_plot_path = "plots/"
Example #8
    def __init__(self, settings, BACKBONE='resnet34', verbose=1):
        self.settings = settings
        self.debugger = Debugger.Debugger(settings)
        self.verbose = verbose

        self.use_sigmoid_or_softmax = 'softmax'
        assert self.use_sigmoid_or_softmax == 'softmax'

        #BACKBONE = 'resnet34'
        #BACKBONE = 'resnet50' #batch 16
        #BACKBONE = 'resnet101' #batch 8
        #BACKBONE = 'seresnext50' #trying batch 16 as well
        custom_weights_file = "imagenet"

        #weights from imagenet finetuned on aerial data specific task - will it work? will it break?
        #custom_weights_file = "/scratch/ruzicka/python_projects_large/AerialNet_VariousTasks/model_UNet-Resnet34_DSM_in01_95percOfTrain_8batch_100ep_dsm01proper.h5"

        resolution_of_input = None
        #resolution_of_input = 256
        self.model = self.create_model(backbone=BACKBONE,
                                       custom_weights_file=custom_weights_file,
                                       input_size=resolution_of_input,
                                       channels=3)

        if self.verbose >= 2:
            self.model.summary()
Example #9
def get_balanced_dataset(in_memory=False, TMP_WHOLE_UNBALANCED=False):
    from ActiveLearning.LargeDatasetHandler_AL import LargeDatasetHandler_AL
    import Settings

    # init structures
    import mock
    args = mock.Mock()
    args.name = "test"

    settings = Settings.Settings(args)
    WholeDataset = LargeDatasetHandler_AL(settings)

    # load paths of our favourite dataset!
    import DataLoader, DataPreprocesser, Debugger
    import DatasetInstance_OurAerial

    dataLoader = DataLoader.DataLoader(settings)
    debugger = Debugger.Debugger(settings)

    #h5_file = settings.large_file_folder + "datasets/OurAerial_preloadedImgs_subBAL3.0_1.0_sel2144_res256x256.h5"
    h5_file = settings.large_file_folder + "datasets/OurAerial_preloadedImgs_subBAL3.0_1.0_sel2144_res256x256_SMALLER.h5"

    datasetInstance = DatasetInstance_OurAerial.DatasetInstance_OurAerial(
        settings, dataLoader, "256_cleanManual")

    if not TMP_WHOLE_UNBALANCED:
        # ! this one automatically balances the data + deletes misfits in the resolution
        data, paths = datasetInstance.load_dataset()
        lefts_paths, rights_paths, labels_paths = paths
        print("Paths: L,R,Y ", len(lefts_paths), len(rights_paths),
              len(labels_paths))

    else:
        # ! this one loads them all (CHECK: would some be deleted?)
        paths = datasetInstance.load_dataset_ONLY_PATHS_UPDATE_FROM_THE_OTHER_ONE_IF_NEEDED(
        )
        lefts_paths, rights_paths, labels_paths = paths
        print("Paths: L,R,Y ", len(lefts_paths), len(rights_paths),
              len(labels_paths))

    WholeDataset.initialize_from_just_paths(paths)

    if in_memory:
        assert not TMP_WHOLE_UNBALANCED
        #WholeDataset.keep_it_all_in_memory()
        WholeDataset.keep_it_all_in_memory(h5_file)

    npy_path = settings.large_file_folder + "datasets/OurAerial_preloadedImgs_BALCLASS.npy"

    I_WANT_TO_RECOMPUTE_THE_LABELS = False
    if I_WANT_TO_RECOMPUTE_THE_LABELS:
        assert False  # don't want to mistakenly recompute these ...
        WholeDataset.compute_per_tile_class_in_batches()
        WholeDataset.save_per_tile_class(npy_path)

    WholeDataset.load_per_tile_class(npy_path)

    WholeDataset.report()

    return WholeDataset
Example #10
    def __init__(self, settings, dataset):
        self.settings = settings
        self.dataset = dataset
        self.dataPreprocesser = dataset.dataPreprocesser
        self.debugger = Debugger.Debugger(settings)

        self.use_sigmoid_or_softmax = 'softmax'
        assert self.use_sigmoid_or_softmax == 'softmax'

        #BACKBONE = 'resnet34'
        #BACKBONE = 'resnet50' #batch 16
        #BACKBONE = 'resnet101' #batch 8
        BACKBONE =  settings.model_backend
        custom_weights_file = "imagenet"

        #weights from imagenet finetuned on aerial data specific task - will it work? will it break?
        #custom_weights_file = "/scratch/ruzicka/python_projects_large/AerialNet_VariousTasks/model_UNet-Resnet34_DSM_in01_95percOfTrain_8batch_100ep_dsm01proper.h5"

        #resolution_of_input = self.dataset.datasetInstance.IMAGE_RESOLUTION
        resolution_of_input = None
        self.model = self.create_model(backbone=BACKBONE, custom_weights_file=custom_weights_file, input_size = resolution_of_input, channels = 3)
        self.model.summary()

        self.local_setting_batch_size = settings.train_batch #8 #32
        self.local_setting_epochs = settings.train_epochs #100

        self.train_data_augmentation = True


        # saving paths for plots ...
        self.save_plot_path = "plots/"
Example #11
    def debugger(self):
        """Call up the pdb debugger if desired, always clean up the tb reference.

        If the call_pdb flag is set, the pdb interactive debugger is
        invoked. In all cases, the self.tb reference to the current traceback
        is deleted to prevent lingering references which hamper memory
        management.

        Note that each call to pdb() does an 'import readline', so if your app
        requires a special setup for the readline completers, you'll have to
        fix that by hand after invoking the exception handler."""

        if self.call_pdb:
            if self.pdb is None:
                self.pdb = Debugger.Pdb()
            # the system displayhook may have changed, restore the original for pdb
            dhook = sys.displayhook
            sys.displayhook = sys.__displayhook__
            self.pdb.reset()
            while self.tb.tb_next is not None:
                self.tb = self.tb.tb_next
            try:
                self.pdb.interaction(self.tb.tb_frame, self.tb)
            except IndexError:
                print '*** ERROR ***'
                print 'This version of pdb has a known bug and crashed.'
                print 'Returning to IPython...'
            sys.displayhook = dhook
        del self.tb
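The same post-mortem pattern is available from the standard library's pdb; a hedged, minimal sketch (the failing expression exists only to produce a traceback):

import pdb
import sys

try:
    1 / 0
except ZeroDivisionError:
    tb = sys.exc_info()[2]
    pdb.post_mortem(tb)  # opens the interactive debugger at the frame that raised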
Example #12
def ShowRam():
    if debug.ramview == None and atari.cart != None:
        debug.ramview = Debugger.RamViewer(
            gui, [('RIOT', atari.ram)] + atari.cart.getRam() +
            [('Cartridge', atari.cart.getRom())], FixedFont, atari.reg)
    elif debug.ramview != None:
        debug.ramview.Show()
Example #13
    def __init__(self, settings, dataLoader, variant="256_cleanManual"):
        self.settings = settings
        self.dataLoader = dataLoader
        self.debugger = Debugger.Debugger(settings)
        self.DEBUG_TURN_OFF_BALANCING = False

        self.variant = variant  # 256 = 256x256, 112 = 112x112

        self.local_setting_skip_rows = 2
        self.local_setting_skip_columns = 2

        self.save_path_ = "OurAerial_preloadedImgs_sub"

        if self.variant == "256_cleanManual":
            self.dataset_version = "256x256_cleanManual"

            self.SUBSET = -1
            self.IMAGE_RESOLUTION = 256
            self.CHANNEL_NUMBER = 4
            self.LOAD_BATCH_INCREMENT = 10000  # loads in this big batches for each balancing

            self.default_raster_shape = (256, 256, 4)
            self.default_vector_shape = (256, 256)

            # self.hdf5_path = self.settings.large_file_folder + "datasets/OurAerial_preloadedImgs_subBAL3.0_1.0_sel2144_res256x256.h5"
            self.hdf5_path = self.settings.large_file_folder + "datasets/OurAerial_preloadedImgs_subBAL3.0_1.0_sel2144_res256x256_SMALLER.h5"

            self.bigger_than_percent = 3.0  # try?
            self.smaller_than_percent = 1.0  # there shouldn't be much noise in this ...

            self.split_train = 1900
            self.split_val = 2000

        elif self.variant == "6368_special":
            self.local_setting_skip_rows = 0
            self.local_setting_skip_columns = 0

            self.dataset_version = "6368_special"
            self.SUBSET = None  #all
            self.IMAGE_RESOLUTION = 6368
            self.CHANNEL_NUMBER = 4
            self.LOAD_BATCH_INCREMENT = 20  # from 14 images

            self.bigger_than_percent = 0.0  # doesn't make much sense here!
            self.smaller_than_percent = 0.0  # doesn't make much sense here!

            self.default_raster_shape = (6368, 6368, 4)
            self.default_vector_shape = (6368, 6368)

            # decent dataset:
            self.hdf5_path = self.settings.large_file_folder + "datasets/OurAerial_preloadedImgs_subBAL0.0_0.0_sel13_res6368x6368.h5"

            # splitting <14>
            # 0 train, 0 val, 14 test
            self.split_train = 0
            self.split_val = 0
            self.DEBUG_TURN_OFF_BALANCING = True
Example #14
    def __init__(self, settings, init_source=1):
        self.settings = settings
        self.dataLoader = DataLoader.DataLoader(settings)
        self.debugger = Debugger.Debugger(settings)

        if init_source == 1:
            self.init_from_stable_datasets()
        else:
            print("Init manually from data and labels")
            self.datasetInstance = None
            self.dataPreprocesser = None
Example #15
 def open_debugger(self):
     if self.interp.rpcclt:
         dbg_gui = RemoteDebugger.start_remote_debugger(
             self.interp.rpcclt, self)
     else:
         dbg_gui = Debugger.Debugger(self)
     self.interp.setdebugger(dbg_gui)
     dbg_gui.load_breakpoints()
     sys.ps1 = "[DEBUG ON]\n>>> "
     self.showprompt()
     self.set_debugger_indicator()
Example #16
def simulate(canvas):
    global theCanvas
    theCanvas = canvas
    mname = canvas.statusbar.getState(StatusBar.MODEL)[1][0]
    if not mname:
        mname = "Nonamed.des"
    else:
        if mname.endswith(".py"):
            mname = mname[:len(mname) - 3]
        mname = mname + ".des"

    global sc
    sc = generate_description(canvas, 0)

    global debugger
    debugger = Debugger()

    global eventhandler
    eventhandler = EventHandler(mname,
                                callback=debugger.EventDebugger,
                                use_gui=1,
                                modeltext=sc["desc"])
    eventhandler.final.append("SVMAToM3Plugin.finalize_simulation()")
    debugger.CustomizeEvent(event_callback, None, 1)

    global root
    root = canvas.ASGroot.listNodes

    DefaultInterpreter.runsource("eventhandler=SVMAToM3Plugin.eventhandler")
    DefaultInterpreter.runsource("debugger=SVMAToM3Plugin.debugger")

    debugger.SetEventHandler(eventhandler)

    eventhandler.run_initializer()
    highlight_states(eventhandler.state, sc)
    highlight_trans(eventhandler, sc, root)

    # Cannot start the Tk mainloop again
    # eventhandler.run_interactor()
    DefaultInterpreter.runsource(
        "setup_gui_debugger(eventhandler, debugger, 0, 0)")
Example #17
 def __init__(self, address, port):
     Debugger.printAction('Initializing Khepera robot API' )
     self.comm = Comm(address, port)
     software = self.readSoftwareVersion()
     Debugger.printAction('Khepera software: Bios ' + str( software[0]) + ', Revision ' + str( software[1]) )
     Debugger.printAction('Initializing motors')
     self.initMotors()
Example #18
def get_unbalanced_dataset(in_memory=False):
    assert in_memory == False

    # prep to move the dataset to >> /cluster/work/igp_psr/ruzickav <<
    # instead of loading indiv files, load batches in h5 files

    from ActiveLearning.LargeDatasetHandler_AL import LargeDatasetHandler_AL
    import Settings

    # init structures
    import mock
    args = mock.Mock()
    args.name = "test"

    settings = Settings.Settings(args)
    WholeDataset = LargeDatasetHandler_AL(settings)

    # load paths of our favourite dataset!
    import DataLoader, DataPreprocesser, Debugger
    import DatasetInstance_OurAerial

    dataLoader = DataLoader.DataLoader(settings)
    debugger = Debugger.Debugger(settings)

    datasetInstance = DatasetInstance_OurAerial.DatasetInstance_OurAerial(
        settings, dataLoader, "256_cleanManual")

    # ! this one loads them all (CHECK: would some be deleted?)
    paths = datasetInstance.load_dataset_ONLY_PATHS_UPDATE_FROM_THE_OTHER_ONE_IF_NEEDED(
    )
    lefts_paths, rights_paths, labels_paths = paths
    print("Paths: L,R,Y ", len(lefts_paths), len(rights_paths),
          len(labels_paths))

    WholeDataset.initialize_from_just_paths(paths)

    if in_memory:
        WholeDataset.keep_it_all_in_memory()

    npy_path = settings.large_file_folder + "datasets/OurAerial_preloadedImgs_unBALCLASS.npy"

    I_WANT_TO_RECOMPUTE_THE_LABELS = False
    if I_WANT_TO_RECOMPUTE_THE_LABELS:
        assert False  # don't want to mistakenly recompute these ...
        WholeDataset.compute_per_tile_class_in_batches()
        WholeDataset.save_per_tile_class(npy_path)

    WholeDataset.load_per_tile_class(npy_path)

    WholeDataset.report()

    return WholeDataset
Example #19
 def __init__(self, address, port):
     Debugger.printAction('Initializing Khepera robot API')
     self.comm = Comm(address, port)
     software = self.readSoftwareVersion()
     Debugger.printAction('Khepera software: Bios ' + str(software[0]) +
                          ', Revision ' + str(software[1]))
     Debugger.printAction('Initializing motors')
     self.initMotors()
Example #20
def start_debugger(rpchandler, gui_adap_oid):
    """Start the debugger and its RPC link in the Python subprocess

    Start the subprocess side of the split debugger and set up that side of the
    RPC link by instantiating the GUIProxy, Idb debugger, and IdbAdapter
    objects and linking them together.  Register the IdbAdapter with the
    RPCServer to handle RPC requests from the split debugger GUI via the
    IdbProxy.

    """
    gui_proxy = GUIProxy(rpchandler, gui_adap_oid)
    idb = Debugger.Idb(gui_proxy)
    idb_adap = IdbAdapter(idb)
    rpchandler.register(idb_adap_oid, idb_adap)
    return idb_adap_oid
Example #21
    def __init__(self, settings, create_inmemory_or_ondemand="ondemand"):
        self.settings = settings

        self.KEEP_IT_IN_MEMORY_OR_LOAD_ON_DEMAND = create_inmemory_or_ondemand
        #self.KEEP_IT_IN_MEMORY_OR_LOAD_ON_DEMAND = "inmemory" # or "ondemand"
        self.data_in_memory = {}
        self.labels_in_memory = {}

        self.N_of_data = None
        self.indices = None # Array of indices, doesn't have to be sorted
        self.original_indices = None # Array of indices as they were in their original order (which coincidentally was range(N))
                                     # will be used as a reference to which indices have been removed ...
                                     # Used only with the "RemainingUnlabeledSet" (which never has items added, only slowly popped)
        self.paths = [{},{},{}] # these should also be dictionaries so that you get path from the idx
        self.dataaug_descriptors = {}

        self.per_tile_class = {} # class "change" or "no-change" - in some cases we will precompute these!
        self.has_per_tile_class_computed = False

        # for balance stats
        self.debugger = Debugger.Debugger(settings)
Example #22
def start_remote_debugger(rpcclt, pyshell):
    """Start the subprocess debugger, initialize the debugger GUI and RPC link

    Request the RPCServer start the Python subprocess debugger and link.  Set
    up the Idle side of the split debugger by instantiating the IdbProxy,
    debugger GUI, and debugger GUIAdapter objects and linking them together.

    Register the GUIAdapter with the RPCClient to handle debugger GUI
    interaction requests coming from the subprocess debugger via the GUIProxy.

    The IdbAdapter will pass execution and environment requests coming from the
    Idle debugger GUI to the subprocess debugger via the IdbProxy.

    """
    global idb_adap_oid

    idb_adap_oid = rpcclt.remotecall("exec", "start_the_debugger",\
                                   (gui_adap_oid,), {})
    idb_proxy = IdbProxy(rpcclt, pyshell, idb_adap_oid)
    gui = Debugger.Debugger(pyshell, idb_proxy)
    gui_adap = GUIAdapter(rpcclt, gui)
    rpcclt.register(gui_adap_oid, gui_adap)
    return gui
Example #23
 def sendCommand(self, command):
     
     Debugger.printCommand( 'S: '+str(command) )
     
     if Config.ENABLE_COMM == False:
         s = 'X,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15'
         Debugger.printCommand( 'R: '+s )
         return s
     
     self.sock.send(command+'\r')
     s = ""
     c = '0'
     while(c!='\r') :
         c = self.sock.recv(1)
         s = s + str(c)
     
     s = s[0:len(s)-2] # removing \n\r
     
     Debugger.printCommand( 'R: '+s )
     
     return s
Example #24
def hvac_control(cfg,
                 advise_cfg,
                 tstats,
                 client,
                 thermal_model,
                 zone,
                 building,
                 now,
                 debug=False,
                 simulate=False):
    """
    
    :param cfg:
    :param advise_cfg:
    :param tstats:
    :param client:
    :param thermal_model:
    :param zone:
    :param now: datetime object in UTC which tells the control what now is.
    :param debug: if True, do not actuate the tstat (no setpoints are written).
    :param simulate: boolean whether to run the control as a simulation or to actually actuate.
    :return: boolean, dict. The success boolean indicates whether writing the action has succeeded. The dict is
    {cooling_setpoint: float, heating_setpoint: float, override: bool, mode: int}, or None if the success boolean is false.
    """

    try:

        zone_temperatures = {
            dict_zone: dict_tstat.temperature
            for dict_zone, dict_tstat in tstats.items()
        }
        tstat = tstats[zone]
        tstat_temperature = zone_temperatures[
            zone]  # to make sure we get all temperatures at the same time

        # get datamanagers
        dataManager = DataManager(cfg, advise_cfg, client, zone, now=now)
        thermal_data_manager = ThermalDataManager(cfg, client)

        safety_constraints = dataManager.safety_constraints()
        prices = dataManager.prices()
        building_setpoints = dataManager.building_setpoints()
        if simulate or not advise_cfg["Advise"]["Occupancy_Sensors"]:
            occ_predictions = dataManager.preprocess_occ_cfg()
        else:
            occ_predictions = dataManager.preprocess_occ_mdal()

        if not simulate:
            # TODO FIX THE UPDATE STEP. PUT THIS OUTSIDE OF HVAC CONTROL.
            # NOTE: call update before setWeatherPredictions and set_temperatures
            thermal_model.update(zone_temperatures,
                                 interval=cfg["Interval_Length"])

        # need to set weather predictions for every loop and set current zone temperatures.
        thermal_model.set_temperatures(zone_temperatures)

        # ===== Future and past outside temperature combine =====
        # Get correct weather predictions.
        # the given now may be before the actual current time,
        # so we need to get historic data and combine it with the weather predictions.

        # finding out where the historic/future intervals start and end.
        utc_now = utils.get_utc_now()

        # If simulation window is partially in the past and in the future
        if utils.in_between_datetime(
                utc_now, now, now + datetime.timedelta(
                    hours=advise_cfg["Advise"]["MPCPredictiveHorizon"])):
            historic_start = now
            historic_end = utc_now
            future_start = utc_now
            future_end = now + datetime.timedelta(
                hours=advise_cfg["Advise"]["MPCPredictiveHorizon"])

        # If simulation window is fully in the future
        elif now >= utc_now:
            historic_start = None
            historic_end = None
            future_start = now
            future_end = now + datetime.timedelta(
                hours=advise_cfg["Advise"]["MPCPredictiveHorizon"])

        # If simulation window is fully in the past
        else:
            historic_start = now
            historic_end = now + datetime.timedelta(
                hours=advise_cfg["Advise"]["MPCPredictiveHorizon"])
            future_start = None
            future_end = None

        # Populating the outside_temperatures dictionary for MPC use. Output is in cfg timezone.
        outside_temperatures = {}
        if future_start is not None:
            # TODO implement end for weather_fetch
            future_weather = dataManager.weather_fetch(start=future_start)
            outside_temperatures = future_weather

        # Combining historic data with outside_temperatures correctly if exists.
        if historic_start is not None:
            historic_weather = thermal_data_manager._get_outside_data(
                historic_start, historic_end, inclusive=True)
            historic_weather = thermal_data_manager._preprocess_outside_data(
                historic_weather.values())

            # Down sample the historic weather to hourly entries, and take the mean for each hour.
            historic_weather = historic_weather.groupby(
                [pd.Grouper(freq="1H")])["t_out"].mean()

            # Convert historic_weather to cfg timezone.
            historic_weather.index = historic_weather.index.tz_convert(
                tz=cfg["Pytz_Timezone"])

            # Populate the outside_temperatures dictionary. If we have the simulation time in the past and future then
            # we will take a weighted average of the historic and future temperatures in the hour in which
            # historic_end and future_start happen.
            for row in historic_weather.iteritems():
                row_time, t_out = row[0], row[1]

                # taking a weighted average of the past and future outside temperature since for now
                # we only have one outside temperature per hour.
                if row_time.hour in outside_temperatures and \
                                row_time.hour == historic_end.astimezone(tz=pytz.timezone(cfg["Pytz_Timezone"])).hour:

                    future_t_out = outside_temperatures[row_time.hour]

                    # Checking if start and end are in the same hour, because then we have to weigh the temperature by
                    # less.
                    if historic_end.astimezone(tz=pytz.timezone(cfg["Pytz_Timezone"])).hour ==\
                            historic_start.astimezone(tz=pytz.timezone(cfg["Pytz_Timezone"])).hour:
                        historic_weight = (historic_end -
                                           historic_start).seconds // 60
                    else:
                        historic_weight = historic_end.astimezone(
                            tz=pytz.timezone(cfg["Pytz_Timezone"])).minute
                    if future_start.astimezone(tz=pytz.timezone(cfg["Pytz_Timezone"])).hour ==\
                            future_end.astimezone(tz=pytz.timezone(cfg["Pytz_Timezone"])).hour:
                        future_weight = (future_end -
                                         future_start).seconds // 60
                    else:
                        # the remainder of the hour.
                        future_weight = 60 - future_start.astimezone(
                            tz=pytz.timezone(cfg["Pytz_Timezone"])).minute
                    # Normalize
                    total_weight = future_weight + historic_weight
                    future_weight /= float(total_weight)
                    historic_weight /= float(total_weight)

                    outside_temperatures[row_time.hour] = future_weight * future_t_out + \
                                                          historic_weight * float(t_out)

                else:
                    outside_temperatures[row_time.hour] = float(t_out)

        # setting outside temperature data for the thermal model.
        thermal_model.set_outside_temperature(outside_temperatures)

        # ===== END: Future and past outside temperature combine =====

        if (cfg["Pricing"]["DR"] and utils.in_between(
                now.astimezone(tz=pytz.timezone(cfg["Pytz_Timezone"])).time(),
                utils.get_time_datetime(cfg["Pricing"]["DR_Start"]),
                utils.get_time_datetime(cfg["Pricing"]["DR_Finish"]))):  # \
            # or now.weekday() == 4:  # TODO REMOVE ALWAYS HAVING DR ON FRIDAY WHEN DR SUBSCRIBE IS IMPLEMENTED
            DR = True
        else:
            DR = False

        adv_start = time.time()
        adv = Advise(
            [
                zone
            ],  # array because we might use more than one zone. Multiclass approach.
            now.astimezone(tz=pytz.timezone(cfg["Pytz_Timezone"])),
            occ_predictions,
            [tstat_temperature],
            thermal_model,
            prices,
            advise_cfg["Advise"]["General_Lambda"],
            advise_cfg["Advise"]["DR_Lambda"],
            DR,
            cfg["Interval_Length"],
            advise_cfg["Advise"]["MPCPredictiveHorizon"],
            advise_cfg["Advise"]["Heating_Consumption"],
            advise_cfg["Advise"]["Cooling_Consumption"],
            advise_cfg["Advise"]["Ventilation_Consumption"],
            advise_cfg["Advise"]["Thermal_Precision"],
            advise_cfg["Advise"]["Occupancy_Obs_Len_Addition"],
            building_setpoints,
            advise_cfg["Advise"]["Occupancy_Sensors"]
            if not simulate else False,
            # TODO Only using config file occupancy for now.
            safety_constraints)

        action = adv.advise()
        adv_end = time.time()

    except Exception:
        print("ERROR: For zone %s." % zone)
        print(traceback.format_exc())
        # TODO Find a better way for exceptions
        return False, None

    # action "0" is Do Nothing, action "1" is Heating, action "2" is Cooling
    if action == "0":
        heating_setpoint = tstat_temperature - advise_cfg["Advise"][
            "Minimum_Comfortband_Height"] / 2.
        cooling_setpoint = tstat_temperature + advise_cfg["Advise"][
            "Minimum_Comfortband_Height"] / 2.

        if heating_setpoint < safety_constraints[0][0]:
            heating_setpoint = safety_constraints[0][0]

            if (cooling_setpoint - heating_setpoint
                ) < advise_cfg["Advise"]["Minimum_Comfortband_Height"]:
                cooling_setpoint = min(
                    safety_constraints[0][1], heating_setpoint +
                    advise_cfg["Advise"]["Minimum_Comfortband_Height"])

        elif cooling_setpoint > safety_constraints[0][1]:
            cooling_setpoint = safety_constraints[0][1]

            if (cooling_setpoint - heating_setpoint
                ) < advise_cfg["Advise"]["Minimum_Comfortband_Height"]:
                heating_setpoint = max(
                    safety_constraints[0][0], cooling_setpoint -
                    advise_cfg["Advise"]["Minimum_Comfortband_Height"])

        # round to integers since the thermostats round internally.
        heating_setpoint = math.floor(heating_setpoint)
        cooling_setpoint = math.ceil(cooling_setpoint)

        p = {
            "override": True,
            "heating_setpoint": heating_setpoint,
            "cooling_setpoint": cooling_setpoint,
            "mode": 3
        }
        print "Doing nothing"

    # TODO Rethink how we set setpoints for heating and cooling and for DR events.
    # heating
    elif action == "1":
        heating_setpoint = tstat_temperature + 2 * advise_cfg["Advise"][
            "Hysterisis"]
        cooling_setpoint = heating_setpoint + advise_cfg["Advise"][
            "Minimum_Comfortband_Height"]

        if cooling_setpoint > safety_constraints[0][1]:
            cooling_setpoint = safety_constraints[0][1]

            # making sure we are in the comfortband
            if (cooling_setpoint - heating_setpoint
                ) < advise_cfg["Advise"]["Minimum_Comfortband_Height"]:
                heating_setpoint = max(
                    safety_constraints[0][0], cooling_setpoint -
                    advise_cfg["Advise"]["Minimum_Comfortband_Height"])

        # round to integers since the thermostats round internally.
        heating_setpoint = math.ceil(heating_setpoint)
        cooling_setpoint = math.ceil(cooling_setpoint)

        p = {
            "override": True,
            "heating_setpoint": heating_setpoint,
            "cooling_setpoint": cooling_setpoint,
            "mode": 3
        }
        print "Heating"

    # cooling
    elif action == "2":
        cooling_setpoint = tstat_temperature - 2 * advise_cfg["Advise"][
            "Hysterisis"]
        heating_setpoint = cooling_setpoint - advise_cfg["Advise"][
            "Minimum_Comfortband_Height"]

        if heating_setpoint < safety_constraints[0][0]:
            heating_setpoint = safety_constraints[0][0]

            # making sure we are in the comfortband
            if (cooling_setpoint - heating_setpoint
                ) < advise_cfg["Advise"]["Minimum_Comfortband_Height"]:
                cooling_setpoint = min(
                    safety_constraints[0][1], heating_setpoint +
                    advise_cfg["Advise"]["Minimum_Comfortband_Height"])

        # round to integers since the thermostats round internally.
        heating_setpoint = math.floor(heating_setpoint)
        cooling_setpoint = math.floor(cooling_setpoint)

        p = {
            "override": True,
            "heating_setpoint": heating_setpoint,
            "cooling_setpoint": cooling_setpoint,
            "mode": 3
        }
        print "Cooling"
    else:
        print "Problem with action."
        return False, None

    print("Zone: " + zone + ", action: " + str(p))

    # Plot the MPC graph.
    if advise_cfg["Advise"]["Print_Graph"]:
        adv.g_plot(zone)

    # Log the information related to the current MPC
    Debugger.debug_print(now,
                         building,
                         zone,
                         adv,
                         safety_constraints,
                         prices,
                         building_setpoints,
                         adv_end - adv_start,
                         file=True)

    # try to commit the changes to the thermostat; if it doesn't work after the configured number of tries in a row, ignore it and try again later
    for i in range(advise_cfg["Advise"]["Thermostat_Write_Tries"]):
        try:
            if not debug and not simulate:
                tstat.write(p)
            # Setting last action in the thermal model after we have succeeded in writing to the tstat.
            thermal_model.set_last_action(int(action))
            break
        except:
            if i == advise_cfg["Advise"]["Thermostat_Write_Tries"] - 1:
                e = sys.exc_info()[0]
                print e
                return False, None
            continue

    return True, p
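A hedged worked example of the hour-boundary weighting computed inside hvac_control above: if the historic window covers the first 20 minutes of an hour and the forecast covers the remaining 40, the blended outside temperature is a minute-weighted mean (all numbers below are illustrative):

historic_weight, future_weight = 20.0, 40.0  # minutes of the hour covered by each source
historic_t_out, future_t_out = 68.0, 72.0    # illustrative temperatures
total_weight = historic_weight + future_weight
blended = (future_weight / total_weight) * future_t_out + \
          (historic_weight / total_weight) * historic_t_out
print(blended)  # approximately 70.67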
Example #25
def main_loop(args):
    print(args)

    settings = Settings.Settings(args)
    history = History.History(settings)
    connection = Connection.Connection(settings, history)
    #if connection.failed: return -1
    if connection.hard_stop: return -1

    cropscoordinates = CropsCoordinates.CropsCoordinates(settings, history)
    videocapture = VideoCapture.VideoCapture(settings, history)
    evaluation = Evaluation.Evaluation(settings, connection, cropscoordinates,
                                       history)
    attentionmodel = AttentionModel.AttentionModel(settings, cropscoordinates,
                                                   evaluation, history)
    postprocess = Postprocess.Postprocess(settings, history)

    renderer = Renderer.Renderer(settings, history)
    debugger = Debugger.Debugger(settings, cropscoordinates, evaluation)

    settings.save_settings()
    settings.set_debugger(debugger)

    for frame, next_frames, frame_number in videocapture.frame_generator_thread_loading(
    ):
        settings.frame_number = frame_number

        print("frame: ", frame[2])
        for i in range(len(next_frames)):
            print("next_frames", i, ": ", next_frames[i][2], next_frames[i][0],
                  next_frames[i][2:])

        attention_coordinates = cropscoordinates.get_crops_coordinates(
            'attention')
        #debugger.debug_coordinates_in_frame(attention_coordinates, frame[1],'attention')

        attention_evaluation = evaluation.evaluate_attention_with_precomputing(
            frame_number, attention_coordinates, frame, 'attention',
            next_frames)
        # attention_evaluation start in attention crops space (size of frame downscaled for attention evaluation
        # so that we can cut crops of 608x608 from it easily)

        projected_evaluation = cropscoordinates.project_evaluation_back(
            attention_evaluation, 'attention')
        #debugger.debug_evaluation_to_bboxes_after_reprojection(projected_evaluation, frame[1], 'attention', 'afterRepro')
        # projected_evaluation are now in original image space

        evaluation_coordinates = cropscoordinates.get_crops_coordinates(
            'evaluation')
        # evaluation_coordinates are in evaluation space. (size of frame downscaled for regular evaluation
        # so that we can cut crops of 608x608 from it easily)
        #debugger.debug_coordinates_in_frame(evaluation_coordinates, frame[1], 'evaluation')

        active_coordinates = attentionmodel.get_active_crops_intersections(
            projected_evaluation, evaluation_coordinates, frame)
        #debugger.debug_coordinates_in_frame(active_coordinates, frame[1], 'evaluation', "__"+str(settings.frame_number)+'activeonly')

        if len(active_coordinates) == 0:
            print("Nothing left active - that's possibly ok, skip")
            renderer.render([], frame)
            history.report_skipped_final_evaluation(frame_number)
            continue

        final_evaluation = evaluation.evaluate(active_coordinates, frame,
                                               'evaluation', frame_number)
        # evaluation are in evaluation space
        projected_final_evaluation = cropscoordinates.project_evaluation_back(
            final_evaluation, 'evaluation')
        # projected back to original space

        projected_active_coordinates = cropscoordinates.project_coordinates_back(
            active_coordinates, 'evaluation')

        processed_evaluations = postprocess.postprocess(
            projected_active_coordinates, projected_final_evaluation)
        #debugger.debug_evaluation_to_bboxes_after_reprojection(processed_evaluations, frame[1], 'finalpostprocessed'+frame[0][-8:-4])

        renderer.render(processed_evaluations, frame)

    history.tick_loop(frame_number, True)

    history.save_whole_history_and_settings()
Example #26
    entity = Entity(500, 500, 14,"Wood", table[14])						# make wood entity and entity group
    entitysprite.add(entity)

    enemysprite = pygame.sprite.Group()
    playersprite = createPlayerSprite(500,350, playerTable)	# create sprite group for player
    player = playersprite.sprites()[0]
    
    enemy = Enemy(600, 600,player, enemyTable)
    enemysprite.add(enemy)

    enemy = Enemy(400, 600,player, enemyTable)
    enemysprite.add(enemy)

    inv = Inv(400, 300, "inventory2.png",table, player)
    
    debugger = Debugger(player)
    test = pygame.image.load("background.png").convert_alpha()
    button = pygame.image.load("button.png").convert_alpha()
    button2 = pygame.image.load("buttonExit.png").convert_alpha()
    pauseMenu = Menu(button, button2, screen, test)
    input = InputHandler(mapsprite, playersprite, entitysprite, enemysprite, inv, player, debugger, pauseMenu)


while True:  
    input.poll()			# check input and respond 
    collisionResponse()
    playersprite.update(0) 
    mapsprite.update(0) 		#update map
    entitysprite.update(0)
    enemysprite.update(0)
    mapsprite.draw(screen)		# draw map
Example #27
 def debug_module_event(self, event):
     import Debugger
     debugger = Debugger.Debugger(self)
     self.run_module_event(event, debugger)
Example #28
 def __init__(self, verbose):
     self.debugger = Debugger(verbose=verbose)
Example #29
from Node import *
from Debugger import *


# init the debugger
debug = Debugger()
debug.enable()

class BinarySearchTree(object):
    """A Binary Search Tree Implementation:

    Attributes:
        name: A string representing the BST's name.
        Root: A root node which gets initialized to None.
    """

    def __init__(self, name):
        """Create the root node of the BST.
        """
        debug.printMsg("We Initiated a BST with no root node")
        self.name = name
        self.root = None
        self.size = 0

    def length(self):
        """Returns the length of the BST
        """
        return self.size

    def __contains__(self, key):
        """Overload the *in* operator."""
Example #30
from BinarySearchTree import *
from Node import *
from Debugger import *
import sys


debug = Debugger()
debug.disable()

debug.printMsg("hello, joel")
bst = BinarySearchTree("Joel")


bst.insert("34", 34)
bst.insert("43", 43)
bst.insert("51", 51)
bst.insert("12", 12)
print "++" * 10

if "43" in bst:
	print "in here"

# print res.data
Example #31
 def __init__(self, address, port):
     Debugger.printAction( 'Initializing Comm' )
     if Config.ENABLE_COMM:
         Debugger.printAction( 'Attempting to open bluetooth socket at '+str(address)+', port '+str(port) )
         Debugger.printAction( 'Wait...')
         self.sock = bluetooth.BluetoothSocket(bluetooth.RFCOMM)
         try:
             self.sock.connect( (address, port) )
         except IOError:
             print '\nERROR: Unable to open bluetooth socket, the application will terminate with -1 status code'
             sys.exit(-1)
             
         Debugger.printAction( 'Success!')
     else:
         Debugger.printAction( 'Attempting to open DUMMY bluetooth socket at '+str(address)+', port '+str(port) )
         Debugger.printAction( 'Wait...')
         time.sleep(0.5)
         Debugger.printAction( 'Success!')
Example #32
    def __init__(self, color_scheme='NoColor', call_pdb=0):
        # Whether to call the interactive pdb debugger after printing
        # tracebacks or not
        self.call_pdb = call_pdb
        if call_pdb:
            self.pdb = Debugger.Pdb()
        else:
            self.pdb = None

        # Create color table
        self.ColorSchemeTable = ColorSchemeTable()

        # Populate it with color schemes
        C = TermColors  # shorthand and local lookup
        self.ColorSchemeTable.add_scheme(
            ColorScheme(
                'NoColor',
                # The color to be used for the top line
                topline=C.NoColor,

                # The colors to be used in the traceback
                filename=C.NoColor,
                lineno=C.NoColor,
                name=C.NoColor,
                vName=C.NoColor,
                val=C.NoColor,
                em=C.NoColor,

                # Emphasized colors for the last frame of the traceback
                normalEm=C.NoColor,
                filenameEm=C.NoColor,
                linenoEm=C.NoColor,
                nameEm=C.NoColor,
                valEm=C.NoColor,

                # Colors for printing the exception
                excName=C.NoColor,
                line=C.NoColor,
                caret=C.NoColor,
                Normal=C.NoColor))

        # make some schemes as instances so we can copy them for modification easily:
        self.ColorSchemeTable.add_scheme(
            ColorScheme(
                'Linux',
                # The color to be used for the top line
                topline=C.LightRed,

                # The colors to be used in the traceback
                filename=C.Green,
                lineno=C.Green,
                name=C.Purple,
                vName=C.Cyan,
                val=C.Green,
                em=C.LightCyan,

                # Emphasized colors for the last frame of the traceback
                normalEm=C.LightCyan,
                filenameEm=C.LightGreen,
                linenoEm=C.LightGreen,
                nameEm=C.LightPurple,
                valEm=C.LightBlue,

                # Colors for printing the exception
                excName=C.LightRed,
                line=C.Yellow,
                caret=C.White,
                Normal=C.Normal))

        # For light backgrounds, swap dark/light colors
        self.ColorSchemeTable.add_scheme(
            ColorScheme(
                'LightBG',
                # The color to be used for the top line
                topline=C.Red,

                # The colors to be used in the traceback
                filename=C.LightGreen,
                lineno=C.LightGreen,
                name=C.LightPurple,
                vName=C.Cyan,
                val=C.LightGreen,
                em=C.Cyan,

                # Emphasized colors for the last frame of the traceback
                normalEm=C.Cyan,
                filenameEm=C.Green,
                linenoEm=C.Green,
                nameEm=C.Purple,
                valEm=C.Blue,

                # Colors for printing the exception
                excName=C.Red,
                #line = C.Brown,  # brown often is displayed as yellow
                line=C.Red,
                caret=C.Normal,
                Normal=C.Normal))

        self.set_colors(color_scheme)
        self.old_scheme = color_scheme  # save initial value for toggles
Example #33
 def __init__(self, settings):
     self.settings = settings
     self.debugger = Debugger.Debugger(settings)
Example #34
 def open_debugger(self):
     import Debugger
     self.interp.setdebugger(Debugger.Debugger(self))
     sys.ps1 = "[DEBUG ON]\n>>> "
     self.showprompt()
     self.set_debugger_indicator()