def start(self):
    """
    Starts the proxy running. This must be called from within the process
    where the proxy is going to be used.
    """
    # Initialise the thread-queue base here rather than in __init__, so the
    # worker thread is created in the process that actually uses the proxy
    # (threads are not inherited across process boundaries).
    ThreadQueueBase.__init__(self, name="SettingsManagerProxy")
    # presumably checked by other methods to guard against use-before-start
    # - TODO confirm against callers
    self.started = True
def __init__(self, settings_manager):
    """
    Creates the manager objects needed for capture (host, camera plugin,
    output task handler), reporting any failure through the settings
    manager's "output" variable before re-raising.

    settings_manager -- the shared SettingsManager (or proxy) used for
                        configuration lookups and status output.

    Raises whatever exception the failed sub-manager construction raised.
    """
    ThreadQueueBase.__init__(self, name="CaptureManager")
    try:
        self._settings_manager = settings_manager
        self._host_manager = host.HostManager(settings_manager)

        # get the correct plugin for the camera named in the settings
        home = os.path.expanduser("~")
        cameras_folder = home + "/.pysces_asi/cameras"
        camera_plugin_name = self._settings_manager.get(
            ["camera"])['camera']
        camera.clear_plugins_list()
        camera.load_camera_plugins(cameras_folder)
        self._camera_manager = camera.cameras[
            camera_plugin_name](settings_manager)

        self._output_task_handler = output_task_handler.OutputTaskHandler(
            settings_manager)
    except Exception as ex:
        traceback.print_exc()
        # str(ex) is safe even for exceptions raised with no arguments,
        # whereas ex.args[0] would raise IndexError here and mask the
        # original error. The prefix previously (and wrongly) said
        # "CameraManager>" even though this is the CaptureManager.
        self._settings_manager.set(
            {"output": "CaptureManager> ** ERROR! ** " + str(ex)})
        self.exit()
        # bare raise preserves the original exception and traceback
        raise
def exit(self):
    """
    Updates the settings file and kills the persistant storage class. This
    must be called when you are finished with the SettingsManager in order
    to clean up and stop the worker thread.

    Some doctests:

    >>> import threading
    >>> print(threading.activeCount())
    1
    >>> s = SettingsManager()
    >>> print(threading.activeCount())
    2
    >>> s.exit()
    >>> print(threading.activeCount())
    1
    """
    try:
        # kill the persistant storage
        self.__persistant_storage.exit()

        # update the settings file
        self.set({"output": "SettingsManager> Updating settings file"})
        self.__settings_file_parser.update_settings_file(self.__variables)
    finally:
        # even if writing the settings file fails, make sure all the
        # threads and the shared queue are shut down
        self._stay_alive = False
        # a None task unblocks the remote-task thread so it can observe
        # _stay_alive == False and return
        self._remote_input_queue.put(None)
        self.remote_task_thread.join()
        self._remote_input_queue.close()
        ThreadQueueBase.exit(self)
        print("SettingsManager has exited")
def exit(self):
    """
    Note that the exit method only kills the proxy, not the master.
    However, it does remove the proxy from the master, closing the shared
    queue between them.
    """
    # ask the master to drop this proxy (identified by our id) and close
    # the queue it holds for us
    destroy_request = RemoteTask(self.id, "destroy proxy", self.id)
    self.output_queue.put(destroy_request)

    # finally stop our own worker thread
    ThreadQueueBase.exit(self)
def exit(self):
    """
    Shuts down all the objects that this class created, and then kills
    its own internal worker thread.
    """
    # stop our own worker thread first
    ThreadQueueBase.exit(self)

    # The managers may never have been created if __init__ failed part way
    # through; in that case the attribute lookup raises AttributeError,
    # which we swallow exactly as the per-manager try/excepts did.
    for manager_attr in ("_camera_manager", "_output_task_handler"):
        try:
            getattr(self, manager_attr).exit()
        except AttributeError:
            pass
def exit(self):
    """
    Waits for all the outstanding OutputTasks to be completed then shuts
    down the processing pools and the internal worker thread.
    """
    # stop our own worker thread first
    ThreadQueueBase.exit(self)
    print("OutputTaskHandler: Killed self")

    # shut down both processing pools
    for pool in (self._processing_pool, self._pipelined_processing_pool):
        pool.exit()
    print("OutputTaskHandler: Joined processing pools")

    # the network manager only exists when a web-server was configured
    if self._network_manager is not None:
        self._network_manager.exit()
        print("OutputTaskHandler: Killed network manager")
def __init__(self, settings_manager):
    """
    Verifies that a camera is connected and downloads its configuration.

    settings_manager -- the shared SettingsManager (or proxy) used for
                        configuration lookups and status output.

    Raises GphotoError if no camera is detected; any other exception from
    the camera setup is re-raised after the worker thread is stopped.
    """
    ThreadQueueBase.__init__(self, name="CameraManager")
    try:
        # check that a camera is connected before doing anything else
        if not self.is_connected():
            raise GphotoError("No camera detected")

        if settings_manager.get(['camera_full_auto_clear'])['camera_full_auto_clear']:
            settings_manager.set(
                {"output": "CameraManager> Clearing camera card"})
            # NOTE(review): only the status message is produced here - no
            # call that actually clears the card is visible in this block.
            # Confirm the clearing is performed elsewhere.

        # get the camera configs
        settings_manager.set(
            {"output": "CameraManager> Downloading settings from camera - please wait"})
        self.camera_configs = self.download_configs()
        self.capture_mode = None
    except Exception:
        # stop the worker thread before propagating; a bare raise keeps
        # the original exception and traceback intact
        self.exit()
        raise
def __init__(self, settings_manager):
    """
    Creates the processing pools used to produce outputs, loads the
    output-creation plugins, and optionally creates a NetworkManager for
    copying outputs to a configured web-server.
    """
    self.__pipelined_lock = threading.Lock()

    # size the worker thread pool and its queue by the number of CPU cores
    cpu_cores = multiprocessing.cpu_count()
    ThreadQueueBase.__init__(self, name="OutputTaskHandler",
                             workers=cpu_cores, maxsize=cpu_cores + 2)

    # asynchronous pool for producing outputs - one worker per CPU core
    self._processing_pool = ProcessQueueBase(workers=cpu_cores,
                                             name="Processing Pool")

    # single-worker pool that produces outputs in the order their image
    # types arrive from the camera (useful e.g. for building keograms)
    self._pipelined_processing_pool = ProcessQueueBase(
        workers=1, name="Pipelined Processing Pool")

    # load the output creation functions from the plugins folder
    home_dir = os.path.expanduser("~")
    clear_plugins_list()
    load_output_functions(os.path.normpath(home_dir + "/.pysces_asi/outputs"))

    # a missing "web_server" setting means no web-server is mounted
    try:
        web_server = settings_manager.get(["web_server"])['web_server']
    except KeyError:
        web_server = None

    # the NetworkManager handles copying outputs to the web-server
    self._network_manager = (None if web_server is None
                             else network.NetworkManager(settings_manager))

    self._settings_manager = settings_manager
def __init__(self): ThreadQueueBase.__init__(self, name="SettingsManager") # define method to string mappings - notice that these should be the # thread safe public methods! self._methods = { "get": self.get, "set": self.set, "create": self.create, "register": self.register, "unregister": self.unregister, "operate": self.operate, "destroy proxy": self._commit_destroy_proxy, } # self._manager = Manager() # self._manager.Queue() self._remote_input_queue = multiprocessing.Queue() # create thread to handle remote tasks self.remote_task_thread = Thread(target=self._process_remote_tasks) self.remote_task_thread.start() try: # define private attributes self.__variables = {} self.__callbacks = {} self.__callback_ids = {} self._output_queues = {} # hard code settings file location and create a parser home = os.path.expanduser("~") self.__settings_file_parser = settings_file_parser.SettingsFileParser(home + "/.pysces_asi/settings.txt") # create an output variable - this is used instead of printing to stdout, making it easier # for a top layer application (e.g. a GUI) to access this data self.__create("output", "") # load settings file settings = self.__settings_file_parser.get_settings() # store settings in variables for key in list(settings.keys()): self.__create(key, settings[key]) # create persistant storage class self.__persistant_storage = persist.PersistantStorage(home + "/.pysces_asi", self) # load persistant values into variables persistant_data = self.__persistant_storage.get_persistant_data() for key in list(persistant_data.keys()): self.__create(key, persistant_data[key], persistant=True) except Exception as ex: # if an exception occurs then we need to shut down the threads and # manager before exiting self._stay_alive = False self._remote_input_queue.put(None) self.remote_task_thread.join() self._remote_input_queue.close() ThreadQueueBase.exit(self) raise ex