def __init__(self, parent, gui, **options):
    """TkBlock is a base class. Any class containing Tkinter objects should extend it.

    - initializes common attributes
    - creates a wrapped widget which applies the settings from the theme profile
    - provides generic views for data
    - provides open and close functions

    @param parent: parent Tkinter container
    @param gui: GUI object supplying the theme profile (colours, fonts, images)
    @param options: layout options; every key listed below is required
    """
    self.gui = gui
    self.parent = parent
    # Copy each required layout option onto the instance.  A missing key
    # raises KeyError exactly as the original 13-way parallel assignment did,
    # but this form cannot silently mis-pair a value with the wrong attribute.
    for key in ('column', 'row', 'columnspan', 'rowspan', 'padx', 'pady',
                'sticky', 'scrollable', 'rowweight', 'columnweight',
                'menuindex', 'width', 'height'):
        setattr(self, key, options[key])
    self.widgets, self.variables = {}, {}
    self.notifier = Notifier()
    # collect theme information from the gui
    self.colours = self.gui.colours
    self.fonts = self.gui.fonts
    self.images = self.gui.images
    self.initWidget()
    self.setup()
def newCategory(self, categoryName, logger=None):
    """Create and register a notify category named categoryName.

    Returns the category via self.getCategory(); if the name is already
    registered a warning is printed and the existing category is returned.

    @param categoryName: name of the category to create
    @param logger: optional logger handed to the new Notifier
    """
    # dict.has_key() was removed in Python 3; 'not in' works on both 2 and 3
    if categoryName not in self.__categories:
        self.__categories[categoryName] = Notifier.Notifier(categoryName, logger)
        self.setDconfigLevel(categoryName)
    else:
        # single-argument print() call is valid Python 2 and 3
        print("Warning: DirectNotify: category '%s' already exists" % categoryName)
    return self.getCategory(categoryName)
def main():
    """Entry point: register the QML types, wire the bluetooth manager to a
    notifier, expose everything to the QML context and run the event loop."""
    app = QGuiApplication(argv)
    # make the python-side classes instantiable from QML
    qmlRegisterType(Manager.Manager, 'BTManager', 1, 0, 'BTManager')
    qmlRegisterType(Notifier.Notifier, 'BTNotifier', 1, 0, 'BTNotifier')
    qmlRegisterType(Device.Device, 'Device', 1, 0, 'Device')
    print('Create my device')
    notifier = Notifier.Notifier()
    manager = Manager.Manager()
    manager.set_notifier(notifier)
    print('Bluetooth manager create')
    path = os.path.dirname(__file__)
    print('Detect run path')
    print('Run GUI')
    engine = QQmlApplicationEngine()
    root = engine.rootContext()
    # publish the shared objects under their QML-visible names
    for prop_name, prop_value in (('AppPath', path),
                                  ('MyDevice', manager.my_device),
                                  ('BTManager', manager),
                                  ('BTNotifier', notifier)):
        root.setContextProperty(prop_name, prop_value)
    engine.load('ui/Main.qml')
    print('Start search for near by devices')
    manager.search(True)
    print('Execute app')
    exit(app.exec_())
def __init__(self, parent, dependencies, module, gui):
    """ Initializes the TkDependencyManager object

    example values for dependencies:
    single dependency: { 'package': 'nmap', 'installer': 'apt-get' }
    multi dependency: [{ 'package': 'tk8.5-dev', 'installer': 'apt-get' }, { 'package': 'pillow', 'installer': 'pip', 'version': 'latest' }]

    @param parent Frame
    @param dependencies dict or list of dicts
    @param module: the name of the module which initialized the package manager
    @param gui reference to the GUI object
    """
    self.widgets = {}
    self.variables = {}
    # normalize a single dependency dict to a one-element list; the original
    # list/else branches were identical, so anything non-dict passes through
    if isinstance(dependencies, dict):
        self.dependencies = [dependencies]
    else:
        self.dependencies = dependencies
    self.module = module
    self.gui = gui
    self.notifier = Notifier()
    # command templates for the two supported installers
    self.aptGet = { 'install': ['sudo','apt-get','-q','-y','install'], 'remove': ['sudo','apt-get','-q','-y','remove'] }
    self.pip = { 'install': ['sudo', 'pip','-q', 'install'], 'remove': ['sudo', 'pip','-q', 'uninstall'] }
    self.loadCache()
    # theme information from the gui
    self.colours = self.gui.colours
    self.images = self.gui.images
    self.fonts = self.gui.fonts
    self.widget = Frame(parent,bg=self.colours['bg'], borderwidth=0, highlightthickness=0)
    self.widget.grid(column=0,row=0,sticky='EW')
def newCategory(self, categoryName, logger=None):
    """Return the notify category named categoryName, first creating and
    registering a new Notifier for it when none exists yet."""
    # accesses DirectNotify's name-mangled dict directly, since this
    # function is not defined inside that class body
    categories = self._DirectNotify__categories
    if categoryName not in categories:
        categories[categoryName] = Notifier.Notifier(categoryName, logger)
        self.setDconfigLevel(categoryName)
    return self.getCategory(categoryName)
def setUp(self):
    """Build a fresh fixture: a data manager, a monitor manager holding one
    RAM byte monitor, and a Notifier running in testing mode."""
    self._data_manager = DataManager.DataManager()
    self._monitor_1 = RamByteMonitor.RamByteMonitor()
    self._monitor_manager = MonitorManager.MonitorManager()
    self._monitor_manager.add_monitor(self._monitor_1)
    self._notifier = Notifier.Notifier("shizuka.client.Mulder", testing=True)
def newCategory(self, categoryName, logger=None):
    """newCategory(self, string)
    Make a new notify category named categoryName. Return new category
    if no such category exists, else return existing category
    """
    already_known = categoryName in self.__categories
    if not already_known:
        # register a fresh Notifier and apply its dconfig level
        self.__categories[categoryName] = Notifier.Notifier(categoryName, logger)
        self.setDconfigLevel(categoryName)
    return self.getCategory(categoryName)
def loadModules(self):
    """Instantiate the module selected by self.moduleName.

    Returns True when self.module refers to a usable object afterwards,
    False when it is None.
    """
    if self.moduleName == "notifier":
        self.module = Notifier()
    elif self.moduleName == "none":
        # NOTE(review): moduleName "none" maps to an AFK module — confirm intended
        self.module = AFK()
    # identity test is the idiomatic None check
    if self.module is None:
        return False
    return True
def newCategory(self, categoryName, logger=None):
    """newCategory(self, string)
    Make a new notify category named categoryName. Return new category
    if no such category exists, else return existing category
    """
    # dict.has_key() was removed in Python 3; 'not in' works on both 2 and 3
    if categoryName not in self.__categories:
        self.__categories[categoryName] = Notifier.Notifier(
            categoryName, logger)
        self.setDconfigLevel(categoryName)
    else:
        # single-argument print() call is valid Python 2 and 3
        print("Warning: DirectNotify: category '%s' already exists" %
              (categoryName))
    return (self.getCategory(categoryName))
def __init__(self, name, callback, stopped=False, interval=0.1):
    """ Initializes a GenericThread object

    @param name: thread name; None keeps the default thread name
    @param callback: callable executed by the thread
    @param stopped: start in the stopped state when True
    @param interval: seconds between callback runs; non-numeric or
           non-positive values fall back to 0.1
    """
    import numbers
    threading.Thread.__init__(self)
    if name is not None:
        self.setName(name)
    self.callback = callback
    self.stopped = stopped
    # numbers.Real covers int and float (and Python 2's long); the original
    # (int, float, long) tuple raises NameError on Python 3
    if isinstance(interval, numbers.Real) and interval > 0.0:
        self.interval = interval
    else:
        self.interval = 0.1
    self.notifier = Notifier()
    # daemonize so the interpreter can exit without joining this thread
    self.daemon = True
    # the thread starts itself immediately on construction
    threading.Thread.start(self)
def __init__(self, parent, gui, **options):
    """ Initializes TkNotifier object

    @param parent
    @param gui
    @param options
    """
    super(TkNotifier, self).__init__(parent, gui, **options)
    # millisecond timestamp helper
    self.now = lambda: int(round(time.time() * 1000))
    self.displayed = []
    self.firstrun = True
    notifier = Notifier(log=Setting.get('notifier_log', False))
    notifier.setCallback(self.update)
    self.notifier = notifier
    self.addNotifier()
    self.notifier.addNotice('Welcome to AllMyServos')
    # periodically prune stale notices
    self.gui.scheduler.addTask('notifier_cleanup', self.cleanup, 15)
def __init__(self, scheduler=None, kbthread=None, notifier=None):
    """ Initializes the camera object

    @param scheduler: optional Scheduler; the singleton is used when omitted
    @param kbthread: accepted but unused — the keyboard-thread singleton is
           always fetched below (NOTE(review): confirm this is intentional)
    @param notifier: optional Notifier; a new one is created when omitted
    """
    self.now = lambda: int(round(time.time() * 1000))
    # identity comparison with None is the idiomatic form
    if scheduler is not None:
        self.scheduler = scheduler
    else:
        self.scheduler = Scheduler.GetInstance()
    self.kbthread = Keyboard.KeyboardThread.GetInstance()
    if notifier is not None:
        self.notifier = notifier
    else:
        self.notifier = Notifier()
    # viewfinder state; 'window' is (x, y, width, height)
    self.viewfinder = { 'enabled': False, 'visible': False, 'window': (0, 0, 320, 240), 'fullscreen': False, 'element': None }
    # regexes for parsing ifconfig output and /boot/config.txt-style settings
    self.patterns = {
        'nic': re.compile(r'(?P<name>[^\s]+).?'),
        'addr': re.compile(r'\s*inet\saddr:(?P<ip>[^\s]+).*'),
        'overscan': re.compile(r'[^#]?disable_overscan=(?P<overscan>\d+).*'),
        'overscan_left': re.compile(r'[^#]?overscan_left=(?P<left>\d+)'),
        'overscan_right': re.compile(r'[^#]?overscan_right=(?P<right>\d+)'),
        'overscan_top': re.compile(r'[^#]?overscan_top=(?P<top>\d+)'),
        'overscan_bottom': re.compile(r'[^#]?overscan_bottom=(?P<bottom>\d+)'),
        'start_x': re.compile(r'[^#]?start_x=(?P<start_x>\d+)'),
        'gpu_mem': re.compile(r'[^#]?gpu_mem=(?P<gpu_mem>\d+)'),
    }
    self.initProfile()
    self.initInfo()
    self.initKb()
    self.callbacks = {}
    # watcher task; starts stopped unless cam_autostart is set
    self.scheduler.addTask('cam_watcher', self.check, interval=0.5, stopped=not Setting.get('cam_autostart', False))
def __init__(self):
    """ Initializes the Command Line Interface """
    self.s = Setting()
    self.notifier = Notifier()
    self.printLicense()
    # bring the subsystems up in their dependency order
    for bring_up in (self.initScheduler, self.initTrayIcon, self.initSpec,
                     self.initMotionScheduler, self.initKbThread,
                     self.initRPC, self.initCamera):
        bring_up()
    self.printFooter()
    try:
        # idle until the user interrupts
        while True:
            time.sleep(100)
    except KeyboardInterrupt:
        self.shutDown()
def main():
    """Wire up the monitoring client, its message queue, notifier and
    command interface, then start the monitoring cycle."""
    import MessageHandler
    import Notifier
    import threading
    logger.setLevel(logging.INFO)
    logger.info("Instantiating the different components we need.")
    #Instantiating the different components we need.
    client = Client()
    monman = MonitorManager.MonitorManager()
    cexec = CommandInterface.CommandInterface()
    client_id = client.get_client_id()
    notifier = Notifier.Notifier(client_id)
    messagehandler = MessageHandler.MessageHandler(client_id)
    logger.info("Setting the outgoing message queue.")
    #Setting the outgoing message queue
    for queue_user in (client, monman):
        queue_user.set_message_queue(messagehandler)
    ##TODO THIS SHIT IS TEMPORARY. MAKE A LOCK FACTORY CLASS OR SOMETHING.
    ##SETTING LOCKS FOR THE MONITORS SO WHEN THEY ARE BEING MODIFIED THEY CANT BE POLLED.
    lock = threading.RLock()
    monman.set_lock(lock)
    notifier._data_manager.set_lock(lock)
    logger.info("Giving client access to crucial components")
    #Giving client access to crucial components
    client.set_monitor_manager(monman)
    client.set_command_executor(cexec)
    client.set_notifier(notifier)
    #making the client visible on the nameserver
    client.register_to_name_server()
    #Sending a "Hey I"m here!" message to the server.
    client.send_discovery()
    #Starting the outgoing message queue
    messagehandler.start()
    #Beginning the monitoring cycle.
    client.begin_monitoring()
def __init__(self, scheduler=None):
    """ Initializes a Network object

    @param scheduler: optional Scheduler; the singleton is used when omitted
    """
    # identity comparison with None is the idiomatic form
    if scheduler is not None:
        self.scheduler = scheduler
    else:
        self.scheduler = Scheduler.GetInstance()
    self.patterns = {}
    self.nodes = {}
    self.__initPatterns()
    # nmap scan results are cached on disk between runs
    self.nmapcachepath = os.path.join(Specification.Specification.filebase, 'network')
    self.nmapcachefile = 'nmap_cache.txt'
    self.nmapcommand = ['nmap', '192.168.0.0/24']
    self.ifconfigcommand = ['ifconfig']
    self.nics = []
    self.myip = None
    self.ifconfigraw = None
    self.nmapraw = None
    # guards against overlapping mapping runs
    self.mapping = False
    self.notifier = Notifier()
    # re-map the network every 30 seconds
    self.scheduler.addTask('network_mapper', self.update, 30)
def test_data_is_received_when_server_is_associated(self):
    """Posting polled data should succeed once the server is registered."""
    self.server.register_to_name_server()
    notifier = Notifier.Notifier("shizuka.client.Mulder")
    polled = notifier.get_polled_data()
    posted_ok = notifier.post_to_server(polled)
    self.assertTrue(posted_ok)
def __init__(self):
    """ Initializes the Scheduler object """
    # task registry, keyed by task name
    self.tasks = {}
    self.notifier = Notifier()
def notifier(db):
    """Build and return a Notifier bound to the given db (fixture-style helper)."""
    return Notifier.Notifier(db)
import Notifier
import StatusMonitor

if __name__ == '__main__':
    # arguments
    if len(sys.argv) < 5:
        print(
            '''Usage: python amtrakstatusnotifier.py <train number> <station> <admin email> <email 1> [email 2]'''
        )
        sys.exit(0x2fe3)
    trainNumber = int(sys.argv[1])
    station = sys.argv[2]
    adminAddress = sys.argv[3]
    addresses = sys.argv[4:]
    # Monitor
    try:
        monitor = StatusMonitor.StatusMonitor()
        monitor.run(trainNumber, station, addresses)
    except Exception as error:
        # use a distinct local name so the 'traceback' module itself is not
        # shadowed by the TracebackException instance
        tb = traceback.TracebackException(type(error), error,
                                          error.__traceback__)
        message = 'Amtrak Status notifier raised an exception: {}\n\n{}'.format(
            error, ''.join(tb.format()))
        # Save to file
        with open('error.log', 'w') as f:
            f.write(message)
        # Send to admin
        notifier = Notifier.Notifier()
        notifier.notify(None, adminAddress, 'Error!', message)
def run(self):
    """Close out every destination dataset of a finished/failed job.

    For each destination block this: skips tid/HammerCloud datasets, looks the
    dataset up (cache first, then DB), decides its final status ('closed',
    'tobemerged', 'tobeclosed', 'doing', ...), updates it in the DB when all
    files are accounted for, propagates the status to the top-level user /
    parent-merge dataset, and triggers the Activator for finished jobs.
    Afterwards it runs the VO closer plugin, finalizes pending user jobs,
    updates unmerged JEDI datasets, and launches the email Notifier when
    appropriate.  All errors are caught and logged (legacy Python 2 code:
    note dict.has_key and the bare except).
    """
    try:
        _logger.debug('%s Start %s' % (self.pandaID,self.job.jobStatus))
        flagComplete    = True
        topUserDsList   = []
        usingMerger     = False
        disableNotifier = False
        firstIndvDS     = True
        finalStatusDS   = []
        for destinationDBlock in self.destinationDBlocks:
            dsList = []
            _logger.debug('%s start %s' % (self.pandaID,destinationDBlock))
            # ignore tid datasets
            if re.search('_tid[\d_]+$',destinationDBlock):
                _logger.debug('%s skip %s' % (self.pandaID,destinationDBlock))
                continue
            # ignore HC datasets
            if re.search('^hc_test\.',destinationDBlock) != None or re.search('^user\.gangarbt\.',destinationDBlock) != None:
                if re.search('_sub\d+$',destinationDBlock) == None and re.search('\.lib$',destinationDBlock) == None:
                    _logger.debug('%s skip HC %s' % (self.pandaID,destinationDBlock))
                    continue
            # query dataset (per-run cache first, then the task buffer DB)
            if self.datasetMap.has_key(destinationDBlock):
                dataset = self.datasetMap[destinationDBlock]
            else:
                dataset = self.taskBuffer.queryDatasetWithMap({'name':destinationDBlock})
            if dataset == None:
                _logger.error('%s Not found : %s' % (self.pandaID,destinationDBlock))
                flagComplete = False
                continue
            # skip tobedeleted/tobeclosed
            if dataset.status in ['cleanup','tobeclosed','completed','deleted']:
                _logger.debug('%s skip %s due to %s' % (self.pandaID,destinationDBlock,dataset.status))
                continue
            dsList.append(dataset)
            # sort
            dsList.sort()
            # count number of completed files
            notFinish = self.taskBuffer.countFilesWithMap({'destinationDBlock':destinationDBlock,
                                                           'status':'unknown'})
            if notFinish < 0:
                _logger.error('%s Invalid DB return : %s' % (self.pandaID,notFinish))
                flagComplete = False
                continue
            # check if completed
            _logger.debug('%s notFinish:%s' % (self.pandaID,notFinish))
            if self.job.destinationSE == 'local' and self.job.prodSourceLabel in ['user','panda']:
                # close non-DQ2 destinationDBlock immediately
                finalStatus = 'closed'
            elif self.job.lockedby == 'jedi' and self.isTopLevelDS(destinationDBlock):
                # set it closed in order not to trigger DDM cleanup. It will be closed by JEDI
                finalStatus = 'closed'
            elif self.job.prodSourceLabel in ['user'] and "--mergeOutput" in self.job.jobParameters \
                     and self.job.processingType != 'usermerge':
                # merge output files
                if firstIndvDS:
                    # set 'tobemerged' to only the first dataset to avoid triggering many Mergers for --individualOutDS
                    finalStatus = 'tobemerged'
                    firstIndvDS = False
                else:
                    finalStatus = 'tobeclosed'
                # set merging to top dataset
                usingMerger = True
                # disable Notifier
                disableNotifier = True
            elif self.job.produceUnMerge():
                finalStatus = 'doing'
            else:
                # set status to 'tobeclosed' to trigger DQ2 closing
                finalStatus = 'tobeclosed'
            # for event-service merge jobs every sub dataset in the jobset must be done
            if notFinish == 0 and EventServiceUtils.isEventServiceMerge(self.job):
                allInJobsetFinished = self.checkSubDatasetsInJobset()
            else:
                allInJobsetFinished = True
            if notFinish == 0 and allInJobsetFinished:
                _logger.debug('%s set %s to dataset : %s' % (self.pandaID,finalStatus,destinationDBlock))
                # set status
                dataset.status = finalStatus
                # update dataset in DB
                retT = self.taskBuffer.updateDatasets(dsList,withLock=True,withCriteria="status<>:crStatus AND status<>:lockStatus ",
                                                      criteriaMap={':crStatus':finalStatus,':lockStatus':'locked'})
                if len(retT) > 0 and retT[0]==1:
                    finalStatusDS += dsList
                    # close user datasets
                    if self.job.prodSourceLabel in ['user'] and self.job.destinationDBlock.endswith('/') \
                           and (dataset.name.startswith('user') or dataset.name.startswith('group')):
                        # get top-level user dataset
                        topUserDsName = re.sub('_sub\d+$','',dataset.name)
                        # update if it is the first attempt
                        if topUserDsName != dataset.name and not topUserDsName in topUserDsList and self.job.lockedby != 'jedi':
                            topUserDs = self.taskBuffer.queryDatasetWithMap({'name':topUserDsName})
                            if topUserDs != None:
                                # check status
                                if topUserDs.status in ['completed','cleanup','tobeclosed','deleted',
                                                        'tobemerged','merging']:
                                    _logger.debug('%s skip %s due to status=%s' % (self.pandaID,topUserDsName,topUserDs.status))
                                else:
                                    # set status
                                    if self.job.processingType.startswith('gangarobot') or \
                                           self.job.processingType.startswith('hammercloud'):
                                        # not trigger freezing for HC datasets so that files can be appended
                                        topUserDs.status = 'completed'
                                    elif not usingMerger:
                                        topUserDs.status = finalStatus
                                    else:
                                        topUserDs.status = 'merging'
                                    # append to avoid repetition
                                    topUserDsList.append(topUserDsName)
                                    # update DB
                                    retTopT = self.taskBuffer.updateDatasets([topUserDs],withLock=True,withCriteria="status<>:crStatus",
                                                                             criteriaMap={':crStatus':topUserDs.status})
                                    if len(retTopT) > 0 and retTopT[0]==1:
                                        _logger.debug('%s set %s to top dataset : %s' % (self.pandaID,topUserDs.status,topUserDsName))
                                    else:
                                        _logger.debug('%s failed to update top dataset : %s' % (self.pandaID,topUserDsName))
                        # get parent dataset for merge job
                        if self.job.processingType == 'usermerge':
                            tmpMatch = re.search('--parentDS ([^ \'\"]+)',self.job.jobParameters)
                            if tmpMatch == None:
                                _logger.error('%s failed to extract parentDS' % self.pandaID)
                            else:
                                unmergedDsName = tmpMatch.group(1)
                                # update if it is the first attempt
                                if not unmergedDsName in topUserDsList:
                                    unmergedDs = self.taskBuffer.queryDatasetWithMap({'name':unmergedDsName})
                                    if unmergedDs == None:
                                        _logger.error('%s failed to get parentDS=%s from DB' % (self.pandaID,unmergedDsName))
                                    else:
                                        # check status
                                        if unmergedDs.status in ['completed','cleanup','tobeclosed']:
                                            _logger.debug('%s skip %s due to status=%s' % (self.pandaID,unmergedDsName,unmergedDs.status))
                                        else:
                                            # set status
                                            unmergedDs.status = finalStatus
                                            # append to avoid repetition
                                            topUserDsList.append(unmergedDsName)
                                            # update DB
                                            retTopT = self.taskBuffer.updateDatasets([unmergedDs],withLock=True,withCriteria="status<>:crStatus",
                                                                                     criteriaMap={':crStatus':unmergedDs.status})
                                            if len(retTopT) > 0 and retTopT[0]==1:
                                                _logger.debug('%s set %s to parent dataset : %s' % (self.pandaID,unmergedDs.status,unmergedDsName))
                                            else:
                                                _logger.debug('%s failed to update parent dataset : %s' % (self.pandaID,unmergedDsName))
                    # start Activator
                    if re.search('_sub\d+$',dataset.name) == None:
                        if self.job.prodSourceLabel=='panda' and self.job.processingType in ['merge','unmerge']:
                            # don't trigger Activator for merge jobs
                            pass
                        else:
                            if self.job.jobStatus == 'finished':
                                aThr = Activator(self.taskBuffer,dataset)
                                aThr.start()
                                aThr.join()
                else:
                    # unset flag since another thread already updated
                    #flagComplete = False
                    pass
            else:
                # update dataset in DB
                self.taskBuffer.updateDatasets(dsList,withLock=True,withCriteria="status<>:crStatus AND status<>:lockStatus ",
                                               criteriaMap={':crStatus':finalStatus,':lockStatus':'locked'})
                # unset flag
                flagComplete = False
            # end
            _logger.debug('%s end %s' % (self.pandaID,destinationDBlock))
        # special actions for vo
        if flagComplete:
            closerPluginClass = panda_config.getPlugin('closer_plugins',self.job.VO)
            if closerPluginClass == None and self.job.VO == 'atlas':
                # use ATLAS plugin for ATLAS
                from CloserAtlasPlugin import CloserAtlasPlugin
                closerPluginClass = CloserAtlasPlugin
            if closerPluginClass != None:
                closerPlugin = closerPluginClass(self.job,finalStatusDS,_logger)
                closerPlugin.execute()
        # change pending jobs to failed
        finalizedFlag = True
        if flagComplete and self.job.prodSourceLabel=='user':
            _logger.debug('%s finalize %s %s' % (self.pandaID,self.job.prodUserName,self.job.jobDefinitionID))
            finalizedFlag = self.taskBuffer.finalizePendingJobs(self.job.prodUserName,self.job.jobDefinitionID)
            _logger.debug('%s finalized with %s' % (self.pandaID,finalizedFlag))
        # update unmerged datasets in JEDI to trigger merging
        if flagComplete and self.job.produceUnMerge() and finalStatusDS != []:
            if finalizedFlag:
                tmpStat = self.taskBuffer.updateUnmergedDatasets(self.job,finalStatusDS)
                _logger.debug('%s updated unmerged datasets with %s' % (self.pandaID,tmpStat))
        # start notifier
        _logger.debug('%s source:%s complete:%s' % (self.pandaID,self.job.prodSourceLabel,flagComplete))
        if (self.job.jobStatus != 'transferring') and ((flagComplete and self.job.prodSourceLabel=='user') or \
           (self.job.jobStatus=='failed' and self.job.prodSourceLabel=='panda')) and \
           self.job.lockedby != 'jedi':
            # don't send email for merge jobs
            if (not disableNotifier) and not self.job.processingType in ['merge','unmerge']:
                useNotifier = True
                summaryInfo = {}
                # check all jobDefIDs in jobsetID
                if not self.job.jobsetID in [0,None,'NULL']:
                    useNotifier,summaryInfo = self.taskBuffer.checkDatasetStatusForNotifier(self.job.jobsetID,self.job.jobDefinitionID,
                                                                                            self.job.prodUserName)
                    _logger.debug('%s useNotifier:%s' % (self.pandaID,useNotifier))
                if useNotifier:
                    _logger.debug('%s start Notifier' % self.pandaID)
                    nThr = Notifier.Notifier(self.taskBuffer,self.job,self.destinationDBlocks,summaryInfo)
                    nThr.run()
                    _logger.debug('%s end Notifier' % self.pandaID)
        _logger.debug('%s End' % self.pandaID)
    except:
        # legacy catch-all: log the error type/value and swallow the exception
        errType,errValue = sys.exc_info()[:2]
        _logger.error("%s %s" % (errType,errValue))