def __init__(self, config, scheduler, results):
    """Initialize the report web manager.

    :param config: web configuration object; must expose ``web_base``
        and ``plotfairy_base`` attributes
    :param scheduler: scheduler instance, stored for later use
    :param results: results store instance, stored for later use
    """
    self.config = config
    self._scheduler = scheduler
    self._results = results
    self._reports = {} # report instances, created on demand
    # initialize the base web manager before deriving URL bases
    DASWebManager.__init__(self, config)
    self.base = config.web_base
    self.plotfairy = config.plotfairy_base
def __init__(self, dasconfig):
    """Set up the DAS web service.

    Reads the 'web_server' and 'mongodb' sections of the DAS
    configuration, creates the task manager, initializes run-time
    attributes via :meth:`init` and spawns the DAS core monitoring
    thread which performs auto-reconnection.

    :param dasconfig: full DAS configuration dictionary
    :raises Exception: if the configured queue type is not supported
    """
    DASWebManager.__init__(self, dasconfig)
    config = dasconfig['web_server']
    self.pid_pat = re.compile(r'^[a-z0-9]{32}')
    # TODO: self.base shall be automatically included in all tmpls
    self.base = config['url_base']
    self.interval = config.get('status_update', 2500)
    self.engine = config.get('engine', None)
    self.check_clients = config.get('check_clients', False)
    nworkers = config['web_workers']
    self.hot_thr = config.get('hot_threshold', 3000)
    self.dasconfig = dasconfig
    self.dburi = self.dasconfig['mongodb']['dburi']
    self.lifetime = self.dasconfig['mongodb']['lifetime']
    self.queue_limit = config.get('queue_limit', 50)
    qtype = config.get('qtype', 'Queue')
    qfreq = config.get('qfreq', 5)
    if qtype not in ['Queue', 'PriorityQueue']:
        msg = 'Wrong queue type, qtype=%s' % qtype
        raise Exception(msg)
    # NOTE: the engine-specific PluginTaskManager branch was dead
    # (commented-out) code and has been removed; a plain TaskManager
    # is always used.
    thr_name = 'DASWebService:TaskManager'
    self.taskmgr = TaskManager(nworkers=nworkers, name=thr_name,
                               qtype=qtype, qfreq=qfreq)
    self.adjust = config.get('adjust_input', False)
    # attributes below are (re)defined at run-time via self.init()
    self.dasmgr = None
    self.reqmgr = None
    self.daskeys = []
    self.colors = {}
    self.dbs_url = None
    self.dbs_global = None
    self.dbs_instances = []
    self.kws = None
    self.q_rewriter = None
    self.dataset_daemon = None
    self.dbsmgr = {}      # dbs_urls vs dbs_daemons, defined at run-time
    self.daskeyslist = [] # list of DAS keys
    self.init()
    self.dbs_init(config)
    # Monitoring thread which performs auto-reconnection
    thname = 'dascore_monitor'
    start_new_thread(thname, dascore_monitor,
                     ({'das': self.dasmgr, 'uri': self.dburi}, self.init, 5))
def __init__(self, dasconfig):
    """Initialize the KWS web manager and start the DAS core monitor."""
    DASWebManager.__init__(self, dasconfig)
    self.dasconfig = dasconfig
    self.dburi = dasconfig['mongodb']['dburi']
    # both attributes are (re)created by self.init()
    self.dasmgr = None
    self.kws = None
    self.init()
    # background thread performing auto-reconnection to the DAS core
    monitor_args = ({'das': self.dasmgr, 'uri': self.dburi}, self.init, 5)
    start_new_thread('dascore_monitor_kws', dascore_monitor, monitor_args)
def __init__(self, dasconfig):
    """Construct the service and spawn the auto-reconnect monitor thread."""
    DASWebManager.__init__(self, dasconfig)
    self.dasconfig = dasconfig
    self.dburi = self.dasconfig['mongodb']['dburi']
    self.dasmgr = None  # set up later by self.init()
    self.kws = None     # set up later by self.init()
    self.init()
    # Monitoring thread which performs auto-reconnection
    thname = 'dascore_monitor_kws'
    state = {'das': self.dasmgr, 'uri': self.dburi}
    start_new_thread(thname, dascore_monitor, (state, self.init, 5))
def __init__(self, dasconfig):
    """Set up the DAS web service (legacy variant).

    Creates the task manager (plugin-based when an engine is
    configured), starts the DAS core monitoring thread and, when
    requested, the DBS daemon.

    Fix: ``dict.has_key()`` is deprecated and removed in Python 3;
    replaced by the equivalent ``in`` test.

    :param dasconfig: full DAS configuration dictionary
    """
    DASWebManager.__init__(self, dasconfig)
    config = dasconfig['web_server']
    self.pid_pat = re.compile(r'^[a-z0-9]{32}')
    self.base = config['url_base']
    self.interval = config.get('status_update', 2500)
    self.engine = config.get('engine', None)
    nworkers = config['number_of_workers']
    self.hot_thr = config.get('hot_threshold', 3000)
    self.dasconfig = dasconfig
    self.dburi = self.dasconfig['mongodb']['dburi']
    self.lifetime = self.dasconfig['mongodb']['lifetime']
    self.queue_limit = config.get('queue_limit', 50)
    if self.engine:
        thr_name = 'DASWebService:PluginTaskManager'
        self.taskmgr = PluginTaskManager(
            bus=self.engine, nworkers=nworkers, name=thr_name)
        self.taskmgr.subscribe()
    else:
        thr_name = 'DASWebService:TaskManager'
        self.taskmgr = TaskManager(nworkers=nworkers, name=thr_name)
    self.adjust = config.get('adjust_input', False)
    self.init()
    # Monitoring thread which performs auto-reconnection
    thread.start_new_thread(dascore_monitor,
        ({'das': self.dasmgr, 'uri': self.dburi}, self.init, 5))
    # Obtain DBS global instance or set it as None
    if 'dbs' in self.dasconfig:
        self.dbs_global = \
            self.dasconfig['dbs'].get('dbs_global_instance', None)
        self.dbs_instances = \
            self.dasconfig['dbs'].get('dbs_instances', [])
    else:
        self.dbs_global = None
        self.dbs_instances = []
    # Start DBS daemon
    self.dataset_daemon = config.get('dbs_daemon', False)
    if self.dataset_daemon:
        self.dbs_daemon(config)
def __init__(self, dasconfig):
    """Configure the DAS web service and launch its monitoring thread."""
    DASWebManager.__init__(self, dasconfig)
    config = dasconfig["web_server"]
    self.pid_pat = re.compile(r"^[a-z0-9]{32}")
    self.base = config["url_base"]
    self.interval = config.get("status_update", 2500)
    self.engine = config.get("engine", None)
    self.check_clients = config.get("check_clients", False)
    nworkers = config["web_workers"]
    self.hot_thr = config.get("hot_threshold", 3000)
    self.dasconfig = dasconfig
    mongo_cfg = dasconfig["mongodb"]
    self.dburi = mongo_cfg["dburi"]
    self.lifetime = mongo_cfg["lifetime"]
    self.queue_limit = config.get("queue_limit", 50)
    qtype = config.get("qtype", "Queue")
    if qtype not in ("Queue", "PriorityQueue"):
        raise Exception("Wrong queue type, qtype=%s" % qtype)
    # pick the plugin-aware task manager when an engine is configured
    if self.engine:
        self.taskmgr = PluginTaskManager(bus=self.engine,
                                         nworkers=nworkers,
                                         name="DASWebService:PluginTaskManager",
                                         qtype=qtype)
        self.taskmgr.subscribe()
    else:
        self.taskmgr = TaskManager(nworkers=nworkers,
                                   name="DASWebService:TaskManager",
                                   qtype=qtype)
    self.adjust = config.get("adjust_input", False)
    # placeholders below are filled in at run-time via self.init()
    for attr in ("dasmgr", "reqmgr", "dbs_url", "dbs_global",
                 "kws", "q_rewriter"):
        setattr(self, attr, None)
    self.daskeys = []
    self.colors = {}
    self.dataset_daemon = config.get("dbs_daemon", False)
    self.dbsmgr = {}       # dbs_urls vs dbs_daemons, defined at run-time
    self.daskeyslist = []  # list of DAS keys
    self.init()
    # Monitoring thread which performs auto-reconnection
    start_new_thread("dascore_monitor", dascore_monitor,
                     ({"das": self.dasmgr, "uri": self.dburi}, self.init, 5))
def __init__(self, config):
    """Prepare the test data service: logging, cache headers and the
    list of sub-systems this class exposes."""
    DASWebManager.__init__(self, config)
    loglevel = config.get('loglevel', 0)
    self.logger = logging.getLogger('DASTestDataService')
    stream_handler = logging.StreamHandler()
    set_cherrypy_logger(stream_handler, loglevel)
    # force to load the page all the time
    cherrypy.response.headers['Cache-Control'] = 'no-cache'
    cherrypy.response.headers['Pragma'] = 'no-cache'
    if not loglevel:
        # be really quiet: route cherrypy root logging to a null handler
        quiet = logging.getLogger('foo')
        quiet.addHandler(NullHandler())
        quiet.setLevel(logging.NOTSET)
        cherrypy.log.logger_root = quiet
    # get list of systems registered in this class: inspect the class
    # and keep all names except dunder methods and the exclusion list
    excluded = ['index', 'default']
    self.systems = [name for name, _member in getmembers(self)
                    if name[:2] != '__' and name not in excluded]
def __init__(self, config=None):
    """Initialize the DAS search service.

    Fixes: mutable ``{}`` default argument replaced by ``None``
    (behavior unchanged for all callers), bare ``except`` narrowed to
    ``except Exception``, and the Python 2 ``print`` statement replaced
    by the function form (identical output for a single argument).

    :param config: configuration dictionary; consulted directly only
        when the WebTools framework configuration is unavailable
    """
    config = {} if config is None else config
    DASWebManager.__init__(self, config)
    try: # try what is supplied from WebTools framework
        cdict = self.config.dictionary_()
        self.cachesrv = cdict.get("cache_server_url",
                                  "http://localhost:8211")
        self.base = "/dascontrollers"
    except Exception: # stand-alone version
        self.cachesrv = config.get("cache_server_url",
                                   "http://localhost:8211")
        self.base = "/das"
    self.dasmgr = DASCore()
    self.daskeys = self.dasmgr.das_keys()
    self.daskeys.sort()
    self.dasmapping = self.dasmgr.mapping
    self.daslogger = self.dasmgr.logger
    self.pageviews = ["xml", "list", "json", "yuijson"]
    msg = "DASSearch::init is started with base=%s" % self.base
    self.daslogger.debug(msg)
    print(msg)
def __init__(self, config=None):
    """Initialize the DAS search service (stand-alone friendly).

    Fixes: mutable ``{}`` default argument replaced by ``None``
    (behavior unchanged for all callers) and bare ``except`` narrowed
    to ``except Exception`` so system-exiting exceptions propagate.

    :param config: configuration dictionary; consulted directly only
        when the WebTools framework configuration is unavailable
    """
    config = {} if config is None else config
    DASWebManager.__init__(self, config)
    try: # try what is supplied from WebTools framework
        cdict = self.config.dictionary_()
        self.cachesrv = cdict.get('cache_server_url',
                                  'http://localhost:8211')
        self.base = '/dascontrollers'
    except Exception: # stand-alone version
        self.cachesrv = config.get('cache_server_url',
                                   'http://localhost:8211')
        self.base = '/das'
    self.dasmgr = DASCore()
    self.daskeys = self.dasmgr.das_keys()
    self.daskeys.sort()
    self.dasmapping = self.dasmgr.mapping
    self.daslogger = self.dasmgr.logger
    self.pageviews = ['xml', 'list', 'json', 'yuijson']
    msg = "DASSearch::init is started with base=%s" % self.base
    self.daslogger.debug(msg)
    print(msg)
def __init__(self, config):
    """Remember the URL base and load the DAS configuration."""
    DASWebManager.__init__(self, config)
    web_cfg = config['web_server']
    self.base = web_cfg.get('url_base', 'das')
    self.dasconfig = das_readconfig()
def __init__(self, config):
    """Set up the DAS cache REST service.

    Registers the REST method table, connects to the MongoDB logging
    collection (creating a capped collection on first use) and starts
    the cache-manager worker thread.

    Fixes: Python 2 ``print`` statement replaced by the function form
    (identical output for a single argument) and bare ``except``
    narrowed to ``except Exception``.

    :param config: service configuration (WMCore/WebTools object or a
        plain dictionary for stand-alone use)
    """
    self.config = config
    DASWebManager.__init__(self, config)
    self.version = __version__
    # map of HTTP verb -> exposed REST methods and their signatures
    self.methods = {}
    self.methods['GET'] = {
        'request': {'args': ['idx', 'limit', 'query', 'skey', 'order'],
                    'call': self.request, 'version': __version__},
        'nresults': {'args': ['query'],
                     'call': self.nresults, 'version': __version__},
        'records': {'args': ['query', 'count', 'collection'],
                    'call': self.records, 'version': __version__},
        'status': {'args': ['query'],
                   'call': self.status, 'version': __version__},
    }
    self.methods['POST'] = {'create': {'args': ['query', 'expire'],
                                       'call': self.create,
                                       'version': __version__}}
    self.methods['PUT'] = {'replace': {'args': ['query', 'expire'],
                                       'call': self.replace,
                                       'version': __version__}}
    self.methods['DELETE'] = {'delete': {'args': ['query'],
                                         'call': self.delete,
                                         'version': __version__}}
    try: # WMCore/WebTools
        rest = RESTModel(config)
        rest.methods = self.methods # set RESTModel methods
        self.model = self # re-reference model to my class
        self.model.handler = rest.handler # reference handler to RESTModel
        cdict = self.config.dictionary_()
        self.base = '/rest'
    except Exception: # stand-alone version
        cdict = {}
        self.base = ''
    self.dascore = DASCore()
    dbhost = self.dascore.dasconfig['mongocache_dbhost']
    dbport = self.dascore.dasconfig['mongocache_dbport']
    capped_size = self.dascore.dasconfig['mongocache_capped_size']
    self.con = Connection(dbhost, dbport)
    if 'logging' not in self.con.database_names():
        db = self.con['logging']
        options = {'capped': True, 'size': capped_size}
        db.create_collection('db', options)
        self.warning('Created logging.db, size=%s' % capped_size)
    self.col = self.con['logging']['db']
    sleep = cdict.get('sleep', 2)
    verbose = cdict.get('verbose', None)
    iconfig = {'sleep': sleep, 'verbose': verbose,
               'logger': self.dascore.logger}
    self.cachemgr = DASCacheMgr(iconfig)
    thread.start_new_thread(self.cachemgr.worker, (worker, ))
    msg = 'DASCacheMode::init, host=%s, port=%s, capped_size=%s' \
        % (dbhost, dbport, capped_size)
    self.dascore.logger.debug(msg)
    print(msg)
def __init__(self, config):
    """Set up the DAS cache REST service (reformatted variant).

    Registers the REST method table, connects to the MongoDB logging
    collection (creating a capped collection on first use) and starts
    the cache-manager worker thread.

    Fix: bare ``except`` narrowed to ``except Exception`` so
    system-exiting exceptions are not swallowed.

    :param config: service configuration (WMCore/WebTools object or a
        plain dictionary for stand-alone use)
    """
    self.config = config
    DASWebManager.__init__(self, config)
    self.version = __version__
    # map of HTTP verb -> exposed REST methods and their signatures
    self.methods = {}
    self.methods['GET'] = {
        'request': {
            'args': ['idx', 'limit', 'query', 'skey', 'order'],
            'call': self.request,
            'version': __version__
        },
        'nresults': {
            'args': ['query'],
            'call': self.nresults,
            'version': __version__
        },
        'records': {
            'args': ['query', 'count', 'collection'],
            'call': self.records,
            'version': __version__
        },
        'status': {
            'args': ['query'],
            'call': self.status,
            'version': __version__
        },
    }
    self.methods['POST'] = {
        'create': {
            'args': ['query', 'expire'],
            'call': self.create,
            'version': __version__
        }
    }
    self.methods['PUT'] = {
        'replace': {
            'args': ['query', 'expire'],
            'call': self.replace,
            'version': __version__
        }
    }
    self.methods['DELETE'] = {
        'delete': {
            'args': ['query'],
            'call': self.delete,
            'version': __version__
        }
    }
    try:  # WMCore/WebTools
        rest = RESTModel(config)
        rest.methods = self.methods  # set RESTModel methods
        self.model = self  # re-reference model to my class
        self.model.handler = rest.handler  # reference handler to RESTModel
        cdict = self.config.dictionary_()
        self.base = '/rest'
    except Exception:  # stand-alone version
        cdict = {}
        self.base = ''
    self.dascore = DASCore()
    dbhost = self.dascore.dasconfig['mongocache_dbhost']
    dbport = self.dascore.dasconfig['mongocache_dbport']
    capped_size = self.dascore.dasconfig['mongocache_capped_size']
    self.con = Connection(dbhost, dbport)
    if 'logging' not in self.con.database_names():
        db = self.con['logging']
        options = {'capped': True, 'size': capped_size}
        db.create_collection('db', options)
        self.warning('Created logging.db, size=%s' % capped_size)
    self.col = self.con['logging']['db']
    sleep = cdict.get('sleep', 2)
    verbose = cdict.get('verbose', None)
    iconfig = {
        'sleep': sleep,
        'verbose': verbose,
        'logger': self.dascore.logger
    }
    self.cachemgr = DASCacheMgr(iconfig)
    thread.start_new_thread(self.cachemgr.worker, (worker, ))
    msg = 'DASCacheMode::init, host=%s, port=%s, capped_size=%s' \
        % (dbhost, dbport, capped_size)
    self.dascore.logger.debug(msg)
    print(msg)