def __init__(self, single_mode=False, stop_event=None, daemon_mode=True):
    """Initialize run-mode flags, shared services, and the database schema.

    :param single_mode: presumably run one cycle instead of looping — confirm
                        against the caller
    :param stop_event: event object used to signal shutdown (may be None)
    :param daemon_mode: presumably detach/daemonize when True — confirm
    """
    # remember how this instance was asked to run
    self.singleMode = single_mode
    self.stopEvent = stop_event
    self.daemonMode = daemon_mode
    # project imports stay function-local, as elsewhere in this codebase;
    # kept in the original order so module side effects fire identically
    from pandaharvester.harvestercore.communicator_pool import CommunicatorPool
    from pandaharvester.harvestercore.queue_config_mapper import QueueConfigMapper
    from pandaharvester.harvestercore.db_proxy_pool import DBProxyPool as DBProxy
    # shared pool of communicators for talking to the PanDA server
    self.communicatorPool = CommunicatorPool()
    # per-queue configuration lookup
    self.queueConfigMapper = QueueConfigMapper()
    # ensure all DB tables exist before anything else uses the database
    schema_proxy = DBProxy()
    schema_proxy.make_tables(self.queueConfigMapper)
# Ad-hoc integration check: wipe the SQLite database file, mirror panda.log
# output to stdout, rebuild the schema, insert one job and read it back,
# then exercise the communicator's get_jobs call.
# NOTE(review): the `try:` matching the `except` below is not visible in
# this chunk — it presumably precedes this span; confirm before reformatting.
    # best-effort removal of any stale database file from a previous run
    os.remove(harvester_config.db.database_filename)
except Exception:
    pass
# attach a stdout handler to every panda.log.* logger that already has a
# handler — reusing that handler's formatter — except db_proxy
for loggerName, loggerObj in iteritems(logging.Logger.manager.loggerDict):
    if loggerName.startswith('panda.log'):
        if len(loggerObj.handlers) == 0:
            continue
        if loggerName.split('.')[-1] in ['db_proxy']:
            continue
        stdoutHandler = logging.StreamHandler(sys.stdout)
        stdoutHandler.setFormatter(loggerObj.handlers[0].formatter)
        loggerObj.addHandler(stdoutHandler)
# recreate the database tables from the queue configuration
queueConfigMapper = QueueConfigMapper()
proxy = DBProxy()
proxy.make_tables(queueConfigMapper)
# insert a minimal job spec and read it back through the proxy
job = JobSpec()
job.PandaID = 1
job.modificationTime = datetime.datetime.now()
proxy.insert_jobs([job])
newJob = proxy.get_job(1)
# smoke-test the communicator pool with placeholder arguments
a = CommunicatorPool()
a.get_jobs('siteName', 'nodeName', 'prodSourceLabel', 'computingElement', 1, {})
# NOTE(review): this span begins with a dangling `except` — its `try:` body
# (presumably the same database-file cleanup as in the earlier copy of this
# sequence) is not visible in this chunk; confirm against the full file.
# The remainder duplicates the preceding logger-setup / insert / get_jobs
# test sequence verbatim.
except Exception:
    pass
# mirror every already-configured panda.log.* logger (except db_proxy) to
# stdout, reusing each logger's existing formatter
for loggerName, loggerObj in iteritems(logging.Logger.manager.loggerDict):
    if loggerName.startswith('panda.log'):
        if len(loggerObj.handlers) == 0:
            continue
        if loggerName.split('.')[-1] in ['db_proxy']:
            continue
        stdoutHandler = logging.StreamHandler(sys.stdout)
        stdoutHandler.setFormatter(loggerObj.handlers[0].formatter)
        loggerObj.addHandler(stdoutHandler)
# rebuild tables, insert one job, read it back
queueConfigMapper = QueueConfigMapper()
proxy = DBProxy()
proxy.make_tables(queueConfigMapper)
job = JobSpec()
job.PandaID = 1
job.modificationTime = datetime.datetime.now()
proxy.insert_jobs([job])
newJob = proxy.get_job(1)
# smoke-test the communicator pool with placeholder arguments
a = CommunicatorPool()
a.get_jobs('siteName', 'nodeName', 'prodSourceLabel', 'computingElement', 1, {})