def _db_addTaskToQueue(self, task, index = True):
    """
    Submit a new task to the scheduler's waiting queue and log it.

    :param task: task object to enqueue (persisted via SchedulerModule)
    :param index: whether the task should be indexed on insertion
    """
    SchedulerModule.getDBInstance().addTaskToWaitingQueue(task, index = index)
    # Lazy %-style args: message is only formatted if INFO is enabled
    self._logger.info("Task %s queued for execution", task)
def _check(args):
    """
    Console command: report the scheduler daemon's status.

    Exit codes: 0 = running (PID printed to stdout), 1 = not running,
    2 = daemon on another host or inconsistent state.
    """
    # /proc is needed to inspect the OS process table (used by _check_running)
    if not os.path.isdir('/proc'):
        raise Exception('This command only works on systems that have /proc/')
    with DBMgr.getInstance().global_connection():
        status = Client().getStatus()
        # Refuse to act if the DB says the daemon lives on another machine
        if status['hostname'] is not None and status['hostname'] != socket.getfqdn():
            print >>sys.stderr, 'The daemon is running on another machine ({0[hostname]})'.format(status)
            sys.exit(2)
        # Per the variable names: False -> DB-side "running" flag,
        # True -> actual OS process check (semantics live in _check_running)
        db_running = _check_running(False)
        os_running = _check_running(True)
        if not args.quiet:
            print >>sys.stderr, 'Database status: running={1}, host={0[hostname]}, pid={0[pid]}'.format(
                status, db_running)
            print >>sys.stderr, 'Process status: running={0}'.format(os_running)
        if db_running and os_running:
            print status['pid']
            sys.exit(0)
        elif not db_running and not os_running:
            sys.exit(1)
        elif db_running and not os_running:
            # Stale DB flag: the process died without clearing it, so fix it
            if not args.quiet:
                print >>sys.stderr, 'Marking dead scheduler as not running'
            SchedulerModule.getDBInstance().setSchedulerRunningStatus(False)
            DBMgr.getInstance().commit()
            sys.exit(1)
        else:
            print >>sys.stderr, 'Unexpected state! Process is running, but scheduler is not marked as running'
            sys.exit(2)
def _db_addTaskToQueue(self, task, index=True):
    """Place a task on the scheduler's waiting queue and record it in the log."""
    scheduler_db = SchedulerModule.getDBInstance()
    scheduler_db.addTaskToWaitingQueue(task, index=index)
    self._logger.info("Task %s queued for execution" % task)
def _check(args):
    """
    Console command: report the scheduler daemon's status.

    Exit codes: 0 = running (PID printed to stdout), 1 = not running,
    2 = daemon on another host or inconsistent state.
    """
    # /proc is needed to inspect the OS process table (used by _check_running)
    if not os.path.isdir('/proc'):
        raise Exception('This command only works on systems that have /proc/')
    with DBMgr.getInstance().global_connection():
        status = Client().getStatus()
        # Refuse to act if the DB says the daemon lives on another machine
        if status[
                'hostname'] is not None and status[
                'hostname'] != socket.getfqdn():
            print >> sys.stderr, 'The daemon is running on another machine ({0[hostname]})'.format(
                status)
            sys.exit(2)
        # Per the variable names: False -> DB-side "running" flag,
        # True -> actual OS process check (semantics live in _check_running)
        db_running = _check_running(False)
        os_running = _check_running(True)
        if not args.quiet:
            print >> sys.stderr, 'Database status: running={1}, host={0[hostname]}, pid={0[pid]}'.format(
                status, db_running)
            print >> sys.stderr, 'Process status: running={0}'.format(
                os_running)
        if db_running and os_running:
            print status['pid']
            sys.exit(0)
        elif not db_running and not os_running:
            sys.exit(1)
        elif db_running and not os_running:
            # Stale DB flag: the process died without clearing it, so fix it
            if not args.quiet:
                print >> sys.stderr, 'Marking dead scheduler as not running'
            SchedulerModule.getDBInstance().setSchedulerRunningStatus(False)
            DBMgr.getInstance().commit()
            sys.exit(1)
        else:
            print >> sys.stderr, 'Unexpected state! Process is running, but scheduler is not marked as running'
            sys.exit(2)
def _run(args):
    """
    Run the task identified by ``args.taskid`` once, from the console.

    Sets up stderr logging, opens a DB request (plus the room-booking DB
    if that module is active), executes the task, then commits and
    releases all connections.
    """
    _setup(args)
    formatter = logging.Formatter("%(asctime)s %(name)s - %(levelname)s %(filename)s:%(lineno)s: %(message)s")
    root = logging.getLogger('')
    handler = logging.StreamHandler()
    handler.setFormatter(formatter)
    root.addHandler(handler)
    dbi = DBMgr.getInstance(max_disconnect_poll=40)
    dbi.startRequest()
    # Initialized before the try so the finally clause can always read it
    useRBDB = False
    try:
        info = HelperMaKaCInfo.getMaKaCInfoInstance()
        useRBDB = info.getRoomBookingModuleActive()
        if useRBDB:
            DALManager.connect()
        sm = SchedulerModule.getDBInstance()
        t = sm.getTaskById(args.taskid)
        t.plugLogger(logging.getLogger('console.run/%s' % args.taskid))
        t.run()
        if useRBDB:
            DALManager.commit()
    finally:
        # Guarantee cleanup even if the task raises (the original leaked
        # the DB request and RB connection on failure)
        if useRBDB:
            DALManager.disconnect()
        dbi.endRequest()
def _run(args):
    """Execute the task given by ``args.taskid`` synchronously from the console."""
    _setup(args)
    log_format = logging.Formatter(
        "%(asctime)s %(name)s - %(levelname)s %(filename)s:%(lineno)s: %(message)s"
    )
    console = logging.StreamHandler()
    console.setFormatter(log_format)
    logging.getLogger('').addHandler(console)
    database = DBMgr.getInstance(max_disconnect_poll=40)
    database.startRequest()
    rb_active = HelperMaKaCInfo.getMaKaCInfoInstance().getRoomBookingModuleActive()
    if rb_active:
        DALManager.connect()
    task = SchedulerModule.getDBInstance().getTaskById(args.taskid)
    task.plugLogger(logging.getLogger('console.run/%s' % args.taskid))
    task.run()
    if rb_active:
        DALManager.commit()
        DALManager.disconnect()
    database.endRequest()
def setUp(self):
    """Per-test fixture: grab the scheduler module and start a scheduler thread."""
    super(_TestScheduler, self).setUp()
    # NOTE(review): the collapsed source makes the with-block extent
    # ambiguous; assuming only the DB access needs the 'database' context
    with self._context('database'):
        self._smodule = SchedulerModule.getDBInstance()
    self._sched = SchedulerThread(self._mode)
    self._sched.start()
def __init__(self, **config):
    """
    config is a dictionary containing configuration parameters
    (consumed by ``_readConfig``)
    """
    self._readConfig(config)
    self._logger = logging.getLogger('scheduler')
    # A DB request must be open before fetching the scheduler module
    self._dbi = DBMgr.getInstance()
    self._dbi.startRequest()
    self._schedModule = SchedulerModule.getDBInstance()
    # Workers currently executing tasks (populated elsewhere;
    # presumably keyed by task id — confirm against the worker code)
    self._runningWorkers = {}
def __init__(self, **config):
    """
    config is a dictionary containing configuration parameters
    (consumed by ``_readConfig``)
    """
    self._readConfig(config)
    self._logger = logging.getLogger('scheduler')
    # A DB request must be open before fetching the scheduler module
    self._dbi = db.DBMgr.getInstance()
    self._dbi.startRequest()
    self._schedModule = SchedulerModule.getDBInstance()
    # Workers currently executing tasks (populated elsewhere;
    # presumably keyed by task id — confirm against the worker code)
    self._runningWorkers = {}
def _prepare(self):
    """
    This acts as a second 'constructor', that is executed in the
    context of the thread (due to database reasons)
    """
    self._prepareDB()
    self._dbi.startRequest()
    self._delayed = False
    # First transaction: locate the task this worker is responsible for
    with self._dbi.transaction():
        schedMod = SchedulerModule.getDBInstance()
        self._task = schedMod.getTaskById(self._taskId)
        # open a logging channel
        self._task.plugLogger(self._logger)
    # Second, synced transaction lets the task do its own setup
    # XXX: potentially conflict-prone
    with self._dbi.transaction(sync=True):
        self._task.prepare()
def _run(args):
    """
    Run the task identified by ``args.taskid`` once, from the console.

    Sets up stderr logging, opens a DB request, executes the task and
    always closes the request afterwards.
    """
    _setup(args)
    formatter = logging.Formatter("%(asctime)s %(name)s - %(levelname)s %(filename)s:%(lineno)s: %(message)s")
    root = logging.getLogger('')
    handler = logging.StreamHandler()
    handler.setFormatter(formatter)
    root.addHandler(handler)
    dbi = DBMgr.getInstance(max_disconnect_poll=40)
    dbi.startRequest()
    try:
        sm = SchedulerModule.getDBInstance()
        t = sm.getTaskById(args.taskid)
        t.plugLogger(logging.getLogger('console.run/%s' % args.taskid))
        t.run()
    finally:
        # Guarantee the DB request is closed even if the task raises
        # (the original leaked the open request on failure)
        dbi.endRequest()
def _run(args):
    """
    Run the task identified by ``args.taskid`` once, from the console.

    Sets up stderr logging, opens a DB request, executes the task inside
    a Flask app context and always closes the request afterwards.
    """
    _setup(args)
    formatter = logging.Formatter(
        "%(asctime)s %(name)s - %(levelname)s %(filename)s:%(lineno)s: %(message)s"
    )
    root = logging.getLogger('')
    handler = logging.StreamHandler()
    handler.setFormatter(formatter)
    root.addHandler(handler)
    dbi = DBMgr.getInstance(max_disconnect_poll=40)
    dbi.startRequest()
    try:
        sm = SchedulerModule.getDBInstance()
        t = sm.getTaskById(args.taskid)
        t.plugLogger(logging.getLogger('console.run/%s' % args.taskid))
        # The task may use Flask request-context-free app facilities
        with make_app(True).app_context():
            t.run()
    finally:
        # Guarantee the DB request is closed even if the task raises
        # (the original leaked the open request on failure)
        dbi.endRequest()
def _prepare(self):
    """
    This acts as a second 'constructor', that is executed in the
    context of the thread (due to database reasons)
    """
    self._prepareDB()
    self._dbi.startRequest()
    # 'as conn' binding removed: the connection object was never used
    with self._dbi.transaction():
        schedMod = SchedulerModule.getDBInstance()
        self._task = schedMod.getTaskById(self._taskId)
        info = HelperMaKaCInfo.getMaKaCInfoInstance()
        self._rbEnabled = info.getRoomBookingModuleActive()
        if self._rbEnabled:
            self._rbdbi = DALManager.getInstance()
            self._rbdbi.connect()
        else:
            # Keep a stand-in so later code can treat _rbdbi uniformly
            self._rbdbi = DALManager.dummyConnection()
        # open a logging channel
        self._task.plugLogger(self._logger)
def __init__(self):
    """Create a client handle bound to the scheduler's persistent module."""
    super(Client, self).__init__()
    scheduler_module = SchedulerModule.getDBInstance()
    self._schedMod = scheduler_module