def __init__(self, **kwarg):
    """Initialize the k8s plugin: apply configuration defaults and build the client."""
    self.logBaseURL = None
    PluginBase.__init__(self, **kwarg)

    self.k8s_client = k8s_Client(namespace=self.k8s_namespace, config_file=self.k8s_config_file)

    # number of processes: coerce missing, falsy or sub-1 values to 1
    if not getattr(self, 'nProcesses', None) or self.nProcesses < 1:
        self.nProcesses = 1
    # x509 proxy: fall back to the environment when not configured
    if not hasattr(self, 'x509UserProxy') and os.getenv('X509_USER_PROXY'):
        self.x509UserProxy = os.getenv('X509_USER_PROXY')
    # CPU / memory adjust ratios (percent)
    if not hasattr(self, 'cpuAdjustRatio'):
        self.cpuAdjustRatio = 100
    if not hasattr(self, 'memoryAdjustRatio'):
        self.memoryAdjustRatio = 100
 def __init__(self, **kwarg):
     """Initialize monitor defaults and plugin-level cache settings."""
     PluginBase.__init__(self, **kwarg)
     # worker-check parallelism
     if not hasattr(self, 'nProcesses'):
         self.nProcesses = 4
     # normalize cancelUnknown to a real boolean (default False)
     self.cancelUnknown = bool(getattr(self, 'cancelUnknown', False))
     # seconds before a held job is given up on
     if not hasattr(self, 'heldTimeout'):
         self.heldTimeout = 3600
     # plugin-level cache of monitoring results
     try:
         self.cacheEnable = harvester_config.monitor.pluginCacheEnable
     except AttributeError:
         self.cacheEnable = False
     try:
         self.cacheRefreshInterval = harvester_config.monitor.pluginCacheRefreshInterval
     except AttributeError:
         self.cacheRefreshInterval = harvester_config.monitor.checkInterval
     if not hasattr(self, 'useCondorHistory'):
         self.useCondorHistory = True
 def __init__(self, **kwarg):
     """Read the credential-manager setup file and create a k8s client."""
     PluginBase.__init__(self, **kwarg)
     main_log = self.make_logger(_logger, method_name='__init__')
     # fall back to inCertFile when inFile is not configured
     if not hasattr(self, 'inFile'):
         self.inFile = self.inCertFile
     # parse the JSON setup configuration; any parse/IO problem is fatal
     try:
         with open(self.inFile) as setup_file:
             self.setupMap = json.load(setup_file)
     except Exception as e:
         main_log.error('Error with inFile/inCertFile . {0}: {1}'.format(
             e.__class__.__name__, e))
         self.setupMap = {}
         raise
     # mandatory setup entries; missing keys are fatal
     try:
         self.k8s_namespace = self.setupMap['k8s_namespace']
         self.k8s_config_file = self.setupMap['k8s_config_file']
         self.proxy_file_list = self.setupMap['proxy_files']
         self.secret_name = self.setupMap.get('secret_name', 'proxy-secret')
     except KeyError as e:
         main_log.error('Missing setup in inFile/inCertFile . {0}: {1}'.format(
             e.__class__.__name__, e))
         raise
     # k8s client
     self.k8s_client = k8s_Client(namespace=self.k8s_namespace, config_file=self.k8s_config_file)
# Example #4
 def __init__(self, **kwarg):
     """Initialize monitor defaults and plugin-level cache settings."""
     PluginBase.__init__(self, **kwarg)
     # worker-check parallelism
     if not hasattr(self, 'nProcesses'):
         self.nProcesses = 4
     # normalize cancelUnknown to a real boolean (default False)
     self.cancelUnknown = bool(getattr(self, 'cancelUnknown', False))
     # seconds before a held job is given up on
     if not hasattr(self, 'heldTimeout'):
         self.heldTimeout = 3600
     # plugin-level cache of monitoring results
     try:
         self.cacheEnable = harvester_config.monitor.pluginCacheEnable
     except AttributeError:
         self.cacheEnable = False
     try:
         self.cacheRefreshInterval = harvester_config.monitor.pluginCacheRefreshInterval
     except AttributeError:
         self.cacheRefreshInterval = harvester_config.monitor.checkInterval
     if not hasattr(self, 'useCondorHistory'):
         self.useCondorHistory = True
# Example #5
    def __init__(self, **kwarg):
        """Set up the aCT sweeper: logger plus aCT DB connection."""
        PluginBase.__init__(self, **kwarg)
        self.log = core_utils.make_logger(baseLogger, 'aCT sweeper', method_name='__init__')
        self.actDB = aCTDBPanda(self.log)
 def __init__(self, **kwarg):
     """Create a Globus Transfer Client from secrets cached by the PanDA server.

     On any failure self.tc is left as None and the error is logged; the
     constructor itself never raises.
     """
     PluginBase.__init__(self, **kwarg)
     tmpLog = self.make_logger(_logger, method_name='GoPreparator __init__ ')
     try:
         self.tc = None
         # client_id / refresh_token come from the PanDA server via the harvester cache
         tmpLog.debug('about to call dbInterface.get_cache(globus_secret)')
         c_data = self.dbInterface.get_cache('globus_secret')
         # BUGFIX: compare with None by identity instead of "not c_data == None"
         if c_data is not None and c_data.data['StatusCode'] == 0:
             tmpLog.debug('Got the globus_secrets from PanDA')
             self.client_id = c_data.data['publicKey']  # client_id
             self.refresh_token = c_data.data['privateKey']  # refresh_token
             tmpStat, self.tc = globus_utils.create_globus_transfer_client(
                 tmpLog, self.client_id, self.refresh_token)
             if not tmpStat:
                 self.tc = None
                 errStr = 'failed to create Globus Transfer Client'
                 tmpLog.error(errStr)
         else:
             self.client_id = None
             self.refresh_token = None
             self.tc = None
             errStr = 'failed to get Globus Client ID and Refresh Token'
             tmpLog.error(errStr)
     except Exception:
         # BUGFIX: bare "except:" would also swallow SystemExit/KeyboardInterrupt
         core_utils.dump_error_message(tmpLog)
     tmpLog.debug('__init__ finished')
# Example #7
 def __init__(self, **kwarg):
     """Assign a per-instance dummy transfer id from the shared counter."""
     global uID
     PluginBase.__init__(self, **kwarg)
     with uLock:
         self.dummy_transfer_id = '{0}_{1}'.format(dummy_transfer_id_base, uID)
         # advance the shared counter, wrapping at the preparator thread count
         uID = (uID + 1) % harvester_config.preparator.nThreads
# Example #8
    def __init__(self, **kwarg):
        """Declare which job attributes are forwarded and build the resource-type mapper."""
        self.jobAttributesToUse = ['nCore', 'minRamCount', 'maxDiskCount', 'maxWalltime']
        PluginBase.__init__(self, **kwarg)
        self.rt_mapper = ResourceTypeMapper()
# Example #9
    def __init__(self, **kwarg):
        """Set up the aCT DB connection and map each VOMS role to an aCT proxy id."""
        PluginBase.__init__(self, **kwarg)
        self.hostname = socket.getfqdn()
        # Set up aCT DB connection
        self.log = core_utils.make_logger(baseLogger, 'aCT submitter', method_name='__init__')
        self.actDB = aCTDBPanda(self.log)
        # role -> proxy file, from the credential manager configuration
        roles = [entry.split('=')[1] for entry in list(harvester_config.credmanager.voms)]
        self.certs = dict(zip(roles, list(harvester_config.credmanager.outCertFile)))
        # role -> aCT proxyid
        self.proxymap = {}
        # TODO: better to send aCT the proxy file and let it handle it
        for role, proxy in self.certs.items():
            cred_type = arc.initializeCredentialsType(arc.initializeCredentialsType.SkipCredentials)
            uc = arc.UserConfig(cred_type)
            uc.ProxyPath(str(proxy))
            dn = arc.Credential(uc).GetIdentityName()
            actp = aCTProxy(self.log)
            attr = '/atlas/Role=' + role
            proxyid = actp.getProxyId(dn, attr)
            if not proxyid:
                raise Exception("Proxy with DN {0} and attribute {1} was not found in proxies table".format(dn, attr))
            self.proxymap[role] = proxyid
# Example #10
 def __init__(self, **kwarg):
     """Build the role -> proxy-file map and the ARC credential type."""
     PluginBase.__init__(self, **kwarg)
     # role names are the part after '=' in each voms entry
     roles = [entry.split('=')[1] for entry in list(harvester_config.credmanager.voms)]
     proxy_files = list(harvester_config.credmanager.outCertFile)
     self.certs = dict(zip(roles, proxy_files))
     self.cred_type = arc.initializeCredentialsType(arc.initializeCredentialsType.SkipCredentials)
# Example #11
    def __init__(self, **kwarg):
        """Set up aCT DB access and resolve the proxy id for each configured VOMS role."""
        PluginBase.__init__(self, **kwarg)
        # Set up aCT DB connection
        self.log = core_utils.make_logger(baseLogger, 'aCT submitter', method_name='__init__')
        self.conf = aCTConfigARC()
        self.actDB = aCTDBPanda(self.log, self.conf.get(["db", "file"]))
        # read the DN from the proxy configured for aCT
        # TODO: specify DN in conf instead
        cred_type = arc.initializeCredentialsType(arc.initializeCredentialsType.SkipCredentials)
        uc = arc.UserConfig(cred_type)
        uc.ProxyPath(str(self.conf.get(['voms', 'proxypath'])))
        dn = arc.Credential(uc).GetIdentityName()
        self.log.info("Running under DN %s" % dn)
        # role -> aCT proxyid (prod/pilot roles)
        self.proxymap = {}
        actp = aCTProxy(self.log)
        for role in self.conf.getList(['voms', 'roles', 'item']):
            attr = '/atlas/Role=' + role
            proxyid = actp.getProxyId(dn, attr)
            if not proxyid:
                raise Exception(
                    "Proxy with DN {0} and attribute {1} was not found in proxies table"
                    .format(dn, attr))
            self.proxymap[role] = proxyid
 def __init__(self, **kwarg):
     """Read the credential-manager setup file and create a k8s client."""
     PluginBase.__init__(self, **kwarg)
     main_log = self.make_logger(_logger, method_name='__init__')
     # fall back to inCertFile when inFile is not configured
     if not hasattr(self, 'inFile'):
         self.inFile = self.inCertFile
     # parse the JSON setup configuration; any parse/IO problem is fatal
     try:
         with open(self.inFile) as setup_file:
             self.setupMap = json.load(setup_file)
     except Exception as e:
         main_log.error('Error with inFile/inCertFile . {0}: {1}'.format(
             e.__class__.__name__, e))
         self.setupMap = {}
         raise
     # mandatory setup entries; missing keys are fatal
     try:
         self.k8s_namespace = self.setupMap['k8s_namespace']
         self.k8s_config_file = self.setupMap['k8s_config_file']
         self.proxy_file_list = self.setupMap['proxy_files']
         self.secret_name = self.setupMap.get('secret_name', 'proxy-secret')
     except KeyError as e:
         main_log.error(
             'Missing setup in inFile/inCertFile . {0}: {1}'.format(
                 e.__class__.__name__, e))
         raise
     # k8s client
     self.k8s_client = k8s_Client(namespace=self.k8s_namespace, config_file=self.k8s_config_file)
# Example #13
 def __init__(self, **kwarg):
     """Create a Globus Transfer Client from secrets cached by the PanDA server.

     On any failure self.tc is left as None and the error is logged; the
     constructor itself never raises.
     """
     PluginBase.__init__(self, **kwarg)
     tmpLog = core_utils.make_logger(_logger,
                                     method_name='GoPreparator __init__ ')
     try:
         self.tc = None
         # client_id / refresh_token come from the PanDA server via the harvester cache
         tmpLog.debug('about to call dbInterface.get_cache(globus_secret)')
         c_data = self.dbInterface.get_cache('globus_secret')
         # BUGFIX: compare with None by identity instead of "not c_data == None"
         if c_data is not None and c_data.data['StatusCode'] == 0:
             tmpLog.debug('Got the globus_secrets from PanDA')
             self.client_id = c_data.data['publicKey']  # client_id
             self.refresh_token = c_data.data['privateKey']  # refresh_token
             tmpStat, self.tc = globus_utils.create_globus_transfer_client(
                 tmpLog, self.client_id, self.refresh_token)
             if not tmpStat:
                 self.tc = None
                 errStr = 'failed to create Globus Transfer Client'
                 tmpLog.error(errStr)
         else:
             self.client_id = None
             self.refresh_token = None
             self.tc = None
             errStr = 'failed to get Globus Client ID and Refresh Token'
             tmpLog.error(errStr)
     except Exception:
         # BUGFIX: bare "except:" would also swallow SystemExit/KeyboardInterrupt
         core_utils.dump_error_message(tmpLog)
     tmpLog.debug('__init__ finished')
# Example #14
    def __init__(self, **kwarg):
        """Set up the aCT DB connection for the submitter."""
        PluginBase.__init__(self, **kwarg)
        self.log = core_utils.make_logger(baseLogger, 'aCT submitter', method_name='__init__')
        self.conf = aCTConfigARC()
        self.actDB = aCTDBPanda(self.log, self.conf.get(["db", "file"]))
# Example #15
    def __init__(self, **kwarg):
        """Initialize the k8s plugin: apply configuration defaults and build the client."""
        self.logBaseURL = None
        PluginBase.__init__(self, **kwarg)

        self.k8s_client = k8s_Client(namespace=self.k8s_namespace,
                                     config_file=self.k8s_config_file)

        # number of processes: coerce missing, falsy or sub-1 values to 1
        if not getattr(self, 'nProcesses', None) or self.nProcesses < 1:
            self.nProcesses = 1
        # x509 proxy: fall back to the environment when not configured
        if not hasattr(self, 'x509UserProxy') and os.getenv('X509_USER_PROXY'):
            self.x509UserProxy = os.getenv('X509_USER_PROXY')
        # CPU / memory adjust ratios (percent)
        if not hasattr(self, 'cpuAdjustRatio'):
            self.cpuAdjustRatio = 100
        if not hasattr(self, 'memoryAdjustRatio'):
            self.memoryAdjustRatio = 100
# Example #16
    def __init__(self, **kwarg):
        """Set up the aCT DB connection and map each VOMS role to an aCT proxy id."""
        PluginBase.__init__(self, **kwarg)
        # Set up aCT DB connection
        self.log = core_utils.make_logger(baseLogger, 'aCT submitter', method_name='__init__')
        self.actDB = aCTDBPanda(self.log)
        # role -> proxy file, from the credential manager configuration
        roles = [entry.split('=')[1] for entry in list(harvester_config.credmanager.voms)]
        self.certs = dict(zip(roles, list(harvester_config.credmanager.outCertFile)))
        # role -> aCT proxyid
        self.proxymap = {}
        # TODO: better to send aCT the proxy file and let it handle it
        for role, proxy in self.certs.items():
            cred_type = arc.initializeCredentialsType(arc.initializeCredentialsType.SkipCredentials)
            uc = arc.UserConfig(cred_type)
            uc.ProxyPath(str(proxy))
            dn = arc.Credential(uc).GetIdentityName()
            self.log.info("Proxy {0} with DN {1} and role {2}".format(proxy, dn, role))
            actp = aCTProxy(self.log)
            attr = '/atlas/Role=' + role
            proxyid = actp.getProxyId(dn, attr)
            if not proxyid:
                raise Exception("Proxy with DN {0} and attribute {1} was not found in proxies table".format(dn, attr))
            self.proxymap[role] = proxyid
# Example #17
 def __init__(self, **kwarg):
     """Initialize HTCondor submitter defaults: processes, proxy, AGIS/CE options, schedd."""
     self.logBaseURL = None
     PluginBase.__init__(self, **kwarg)
     # number of processes: coerce missing, falsy or sub-1 values to 1
     if not getattr(self, 'nProcesses', None) or self.nProcesses < 1:
         self.nProcesses = 1
     # executable file
     self.executableFile = getattr(self, 'executableFile', None)
     # condor log directory
     if not hasattr(self, 'logDir'):
         self.logDir = os.getenv('TMPDIR') or '/tmp'
     # x509 proxy: fall back to the environment when not configured
     if not hasattr(self, 'x509UserProxy'):
         self.x509UserProxy = os.getenv('X509_USER_PROXY')
     # ATLAS AGIS
     self.useAtlasAGIS = bool(getattr(self, 'useAtlasAGIS', False))
     # ATLAS Grid CE requires AGIS, so it forces useAtlasAGIS on
     self.useAtlasGridCE = bool(getattr(self, 'useAtlasGridCE', False))
     self.useAtlasAGIS = self.useAtlasAGIS or self.useAtlasGridCE
     # sdf template directories of CEs
     self.CEtemplateDir = getattr(self, 'CEtemplateDir', '')
     # remote condor schedd and pool name (collector), and spool option
     self.condorSchedd = getattr(self, 'condorSchedd', None)
     self.condorPool = getattr(self, 'condorPool', None)
     self.useSpool = getattr(self, 'useSpool', True)
     # record of information of CE statistics
     self.ceStatsLock = threading.Lock()
     self.ceStats = dict()
# Example #18
 def __init__(self, **kwarg):
     """Initialize forwarded job attributes, adjusters and dynamic-resource state."""
     self.jobAttributesToUse = ['nCore', 'minRamCount', 'maxDiskCount', 'maxWalltime']
     self.adjusters = None
     PluginBase.__init__(self, **kwarg)
     self.init_adjusters_defaults()
     self.dyn_resources = None
 def __init__(self, **kwarg):
     """Load the batch-script template; log upload is disabled by default.

     Lets OSError propagate when templateFile cannot be read.
     """
     self.uploadLog = False
     self.logBaseURL = None
     PluginBase.__init__(self, **kwarg)
     # template for batch script
     # BUGFIX: context manager guarantees the file handle is closed even if read() fails
     with open(self.templateFile) as tmpFile:
         self.template = tmpFile.read()
# Example #20
 def __init__(self, **kwarg):
     """Populate self (a dict) from the cached panda_queues data."""
     dict.__init__(self)
     PluginBase.__init__(self, **kwarg)
     db_interface = DBInterface()
     # cache key is configurable; defaults to the standard queue-data file
     cache = db_interface.get_cache(kwarg.get('cacher_key', 'panda_queues.json'))
     if cache:
         self.update(cache.data)
# Example #21
 def __init__(self, **kwarg):
     """Load the batch-script template; log upload is disabled by default.

     Lets OSError propagate when templateFile cannot be read.
     """
     self.uploadLog = False
     self.logBaseURL = None
     PluginBase.__init__(self, **kwarg)
     # template for batch script
     # BUGFIX: context manager guarantees the file handle is closed even if read() fails
     with open(self.templateFile) as tmpFile:
         self.template = tmpFile.read()
# Example #22
    def __init__(self, **kwarg):
        """Set up the aCT DB connection for the submitter."""
        PluginBase.__init__(self, **kwarg)
        # Set up aCT DB connection
        self.log = core_utils.make_logger(baseLogger, 'aCT submitter', method_name='__init__')
        self.actDB = aCTDBPanda(self.log)
# Example #23
    def __init__(self, **kwarg):
        """Initialize the k8s submitter: client, jobParams parser, proxies and ratios."""
        self.logBaseURL = None
        PluginBase.__init__(self, **kwarg)

        self.k8s_client = k8s_Client(namespace=self.k8s_namespace,
                                     config_file=self.k8s_config_file)

        # required for parsing jobParams
        self.parser = argparse.ArgumentParser()
        self.parser.add_argument('-p', dest='executable', type=unquote)
        self.parser.add_argument('--containerImage', dest='container_image')

        # number of processes; missing, falsy or sub-1 values fall back to 1
        try:
            self.nProcesses
        except AttributeError:
            self.nProcesses = 1
        else:
            if (not self.nProcesses) or (self.nProcesses < 1):
                self.nProcesses = 1
        # x509 proxy: obsolete mode
        try:
            self.x509UserProxy
        except AttributeError:
            if os.getenv('X509_USER_PROXY'):
                self.x509UserProxy = os.getenv('X509_USER_PROXY')

        # x509 proxy for analysis jobs in grandly unified queues
        try:
            self.x509UserProxyAnalysis
        except AttributeError:
            self.x509UserProxyAnalysis = os.getenv('X509_USER_PROXY_ANAL')

        # x509 proxy through k8s secrets: preferred way
        try:
            self.proxySecretPath
        except AttributeError:
            if os.getenv('PROXY_SECRET_PATH'):
                self.proxySecretPath = os.getenv('PROXY_SECRET_PATH')

        # analysis x509 proxy through k8s secrets: on GU queues
        try:
            self.proxySecretPathAnalysis
        except AttributeError:
            if os.getenv('PROXY_SECRET_PATH_ANAL'):
                # BUGFIX: the env var was previously assigned to proxySecretPath,
                # clobbering the production secret path instead of setting the
                # analysis-specific one
                self.proxySecretPathAnalysis = os.getenv('PROXY_SECRET_PATH_ANAL')

        # CPU adjust ratio (percent)
        try:
            self.cpuAdjustRatio
        except AttributeError:
            self.cpuAdjustRatio = 100

        # Memory adjust ratio (percent)
        try:
            self.memoryAdjustRatio
        except AttributeError:
            self.memoryAdjustRatio = 100
# Example #24
    def __init__(self, **kwarg):
        """Set up the aCT sweeper; on DB-connection failure keep actDB as None."""
        PluginBase.__init__(self, **kwarg)
        self.log = core_utils.make_logger(baseLogger, 'aCT sweeper', method_name='__init__')
        try:
            db = aCTDBPanda(self.log)
        except Exception as e:
            self.log.error('Could not connect to aCT database: {0}'.format(str(e)))
            db = None
        self.actDB = db
# Example #25
 def __init__(self, **kwargs):
     """Resolver dict backed by the harvester cache of panda queue data."""
     dict.__init__(self)
     PluginBase.__init__(self, **kwargs)
     # guards concurrent refreshes of the cached data
     self.lock = threading.Lock()
     self.dbInterface = DBInterface()
     # cache key holding the queue data
     self.cacher_key = kwargs.get('cacher_key', 'panda_queues.json')
     # seconds between cache refreshes (default 300)
     self.refresh_period = resolver_config.get('refreshPeriod', 300)
     # 0 forces an immediate refresh below
     self.last_refresh_ts = 0
     self._refresh()
# Example #26
    def __init__(self, **kwarg):
        """aCT messenger setup: job-spec format, scheduler id and credentials."""
        self.jobSpecFileFormat = 'json'
        PluginBase.__init__(self, **kwarg)
        self.schedulerid = harvester_config.master.harvester_id
        self.tmpdir = '/tmp'  # TODO configurable or common function
        # role -> proxy file, from the credential manager configuration
        roles = [entry.split('=')[1] for entry in list(harvester_config.credmanager.voms)]
        self.certs = dict(zip(roles, list(harvester_config.credmanager.outCertFile)))
        self.cred_type = arc.initializeCredentialsType(arc.initializeCredentialsType.SkipCredentials)
# Example #27
 def __init__(self, **kwarg):
     """Load the batch-script template and normalize nProcesses.

     nProcesses below 1 is mapped to None — NOTE(review): sibling plugins coerce
     it to 1 instead; None is kept here to preserve existing behavior.
     """
     self.nProcesses = 1
     self.logBaseURL = None
     PluginBase.__init__(self, **kwarg)
     # template for batch script
     # BUGFIX: context manager guarantees the file handle is closed
     with open(self.templateFile) as tmpFile:
         self.template = tmpFile.read()
     # number of processes
     if self.nProcesses < 1:
         self.nProcesses = None
    def __init__(self, **kwarg):
        """Map GCE VM states to harvester worker states."""
        PluginBase.__init__(self, **kwarg)
        # States taken from: https://cloud.google.com/compute/docs/instances/checking-instance-status
        self.vm_to_worker_status = {
            'RUNNING': WorkSpec.ST_running,
            'TERMINATED': WorkSpec.ST_finished,
            'STOPPING': WorkSpec.ST_finished,
            'PROVISIONING': WorkSpec.ST_submitted,
            'STAGING': WorkSpec.ST_submitted,
        }
# Example #29
    def __init__(self, **kwarg):
        """Build the role -> proxy-file map and the ARC credential type."""
        PluginBase.__init__(self, **kwarg)
        # role names are the part after '=' in each voms entry
        roles = [entry.split('=')[1] for entry in list(harvester_config.credmanager.voms)]
        proxy_files = list(harvester_config.credmanager.outCertFile)
        self.certs = dict(zip(roles, proxy_files))
        self.cred_type = arc.initializeCredentialsType(arc.initializeCredentialsType.SkipCredentials)
# Example #30
    def __init__(self, **kwarg):
        '''Set up DB connection and credentials'''
        PluginBase.__init__(self, **kwarg)
        self.dbproxy = DBProxy()
        self.schedulerid = harvester_config.master.harvester_id
        # role -> proxy file, from the credential manager configuration
        roles = [entry.split('=')[1] for entry in list(harvester_config.credmanager.voms)]
        self.certs = dict(zip(roles, list(harvester_config.credmanager.outCertFile)))
        self.cred_type = arc.initializeCredentialsType(arc.initializeCredentialsType.SkipCredentials)
    def __init__(self, **kwarg):
        """Map GCE VM states to harvester worker states."""
        PluginBase.__init__(self, **kwarg)
        self.queue_config_mapper = QueueConfigMapper()
        # States taken from: https://cloud.google.com/compute/docs/instances/checking-instance-status
        self.vm_to_worker_status = {
            'RUNNING': WorkSpec.ST_running,
            'TERMINATED': WorkSpec.ST_running,  # the VM is stopped, but has to be fully deleted
            'STOPPING': WorkSpec.ST_finished,
            'PROVISIONING': WorkSpec.ST_submitted,
            'STAGING': WorkSpec.ST_submitted,
        }
# Example #32
    def __init__(self, **kwarg):
        """Initialize the k8s submitter: client, configmap starter, parser, proxies, ratios."""
        self.logBaseURL = None
        PluginBase.__init__(self, **kwarg)

        self.k8s_client = k8s_Client(namespace=self.k8s_namespace, config_file=self.k8s_config_file)

        # update or create the pilot starter executable
        self.k8s_client.create_or_patch_configmap_starter()

        # required for parsing jobParams
        self.parser = argparse.ArgumentParser()
        self.parser.add_argument('-p', dest='executable', type=unquote)
        self.parser.add_argument('--containerImage', dest='container_image')

        # allowed associated parameters from AGIS
        self._allowed_agis_attrs = (
                'pilot_url',
            )

        # number of processes; missing, falsy or sub-1 values fall back to 1
        try:
            self.nProcesses
        except AttributeError:
            self.nProcesses = 1
        else:
            if (not self.nProcesses) or (self.nProcesses < 1):
                self.nProcesses = 1

        # x509 proxy through k8s secrets: preferred way
        try:
            self.proxySecretPath
        except AttributeError:
            if os.getenv('PROXY_SECRET_PATH'):
                self.proxySecretPath = os.getenv('PROXY_SECRET_PATH')

        # analysis x509 proxy through k8s secrets: on GU queues
        try:
            self.proxySecretPathAnalysis
        except AttributeError:
            if os.getenv('PROXY_SECRET_PATH_ANAL'):
                # BUGFIX: previously assigned to proxySecretPath, clobbering the
                # production secret path instead of setting the analysis one
                self.proxySecretPathAnalysis = os.getenv('PROXY_SECRET_PATH_ANAL')

        # CPU adjust ratio (percent)
        try:
            self.cpuAdjustRatio
        except AttributeError:
            self.cpuAdjustRatio = 100

        # Memory adjust ratio (percent)
        try:
            self.memoryAdjustRatio
        except AttributeError:
            self.memoryAdjustRatio = 100
    def __init__(self, **kwarg):
        """Map GCE VM states to harvester worker states."""
        PluginBase.__init__(self, **kwarg)
        self.queue_config_mapper = QueueConfigMapper()
        # States taken from: https://cloud.google.com/compute/docs/instances/checking-instance-status
        self.vm_to_worker_status = {
            'RUNNING': WorkSpec.ST_running,
            'TERMINATED': WorkSpec.ST_running,  # the VM is stopped, but has to be fully deleted
            'STOPPING': WorkSpec.ST_finished,
            'PROVISIONING': WorkSpec.ST_submitted,
            'STAGING': WorkSpec.ST_submitted,
        }
# Example #34
 def __init__(self, **kwarg):
     """Open (or create) the per-agent SQLite FIFO database.

     The database filename may contain the literal token ``$(AGENT)``, which is
     replaced with this agent's name.
     """
     PluginBase.__init__(self, **kwarg)
     if hasattr(self, 'database_filename'):
         _db_filename = self.database_filename
     else:
         _db_filename = harvester_config.fifo.database_filename
     # BUGFIX: raw string for the regex; '\$' and '\(' are invalid escape
     # sequences in ordinary strings (SyntaxWarning on Python >= 3.12)
     self.db_path = os.path.abspath(
         re.sub(r'\$\(AGENT\)', self.agentName, _db_filename))
     self._connection_cache = {}
     # create schema and index under an exclusive lock
     with self._get_conn() as conn:
         conn.execute(self._exclusive_lock_sql)
         conn.execute(self._create_sql)
         conn.execute(self._create_index_sql)
         conn.commit()
# Example #35
 def __init__(self, **kwarg):
     """Open an SSH tunnel to the remote host and connect an rpyc client over it."""
     PluginBase.__init__(self, **kwarg)
     # optional SSH settings; fall back to None (or standard port 22)
     ssh_user = getattr(self, 'sshUserName', None)
     ssh_pass = getattr(self, 'sshPassword', None)
     private_key = getattr(self, 'privateKey', None)
     pass_phrase = getattr(self, 'passPhrase', None)
     jump_host = getattr(self, 'jumpHost', None)
     jump_port = getattr(self, 'jumpPort', 22)
     remote_port = getattr(self, 'remotePort', 22)
     sshTunnelPool.make_tunnel_server(self.remoteHost,
                                      remote_port,
                                      self.remoteBindPort,
                                      self.numTunnels,
                                      ssh_username=ssh_user,
                                      ssh_password=ssh_pass,
                                      private_key=private_key,
                                      pass_phrase=pass_phrase,
                                      jump_host=jump_host,
                                      jump_port=jump_port)
     tunnel_host, tunnel_port, _ = sshTunnelPool.get_tunnel(self.remoteHost, remote_port)
     # allow full attribute access over the RPC connection
     self.conn = rpyc.connect(tunnel_host,
                              tunnel_port,
                              config={
                                  "allow_all_attrs": True,
                                  "allow_setattr": True,
                                  "allow_delattr": True
                              })
# Example #36
    def __init__(self, **kwarg):
        '''Set up DB connection and credentials'''
        PluginBase.__init__(self, **kwarg)
        self.dbproxy = DBProxy()
        self.schedulerid = harvester_config.master.harvester_id
        # role -> proxy file, from the credential manager configuration
        roles = [entry.split('=')[1] for entry in list(harvester_config.credmanager.voms)]
        self.certs = dict(zip(roles, list(harvester_config.credmanager.outCertFile)))
        self.cred_type = arc.initializeCredentialsType(arc.initializeCredentialsType.SkipCredentials)
 def __init__(self, **kwarg):
     """Load the batch-script template and normalize nProcesses to at least 1."""
     self.logBaseURL = None
     PluginBase.__init__(self, **kwarg)
     # template for batch script
     # BUGFIX: context manager guarantees the file handle is closed
     with open(self.templateFile) as tmpFile:
         self.template = tmpFile.read()
     # number of processes; missing, falsy or sub-1 values fall back to 1
     try:
         self.nProcesses
     except AttributeError:
         self.nProcesses = 1
     else:
         if (not self.nProcesses) or (self.nProcesses < 1):
             self.nProcesses = 1
# Example #38
 def __init__(self, **kwarg):
     """Open (or create) the titled SQLite FIFO database.

     ``$(TITLE)`` and ``$(AGENT)`` tokens in the filename are substituted
     before resolving the absolute path.
     """
     PluginBase.__init__(self, **kwarg)
     if hasattr(self, 'database_filename'):
         _db_filename = self.database_filename
     else:
         _db_filename = harvester_config.fifo.database_filename
     # BUGFIX: raw strings for the regexes; '\$' and '\(' are invalid escape
     # sequences in ordinary strings (SyntaxWarning on Python >= 3.12)
     _db_filename = re.sub(r'\$\(TITLE\)', self.titleName, _db_filename)
     # NOTE(review): $(AGENT) is also substituted with titleName here, unlike the
     # sibling FIFO which uses agentName — confirm this is intentional
     _db_filename = re.sub(r'\$\(AGENT\)', self.titleName, _db_filename)
     self.db_path = os.path.abspath(_db_filename)
     self._connection_cache = {}
     # create schema and index under an exclusive lock
     with self._get_conn() as conn:
         conn.execute(self._exclusive_lock_sql)
         conn.execute(self._create_sql)
         conn.execute(self._create_index_sql)
         conn.commit()
# Example #39
 def __init__(self, **kwarg):
     """Index cached panda queue configs by their panda_resource name."""
     dict.__init__(self)
     PluginBase.__init__(self, **kwarg)
     dbInterface = DBInterface()
     cacher_key = kwarg.get('cacher_key', 'panda_queues.json')
     panda_queues_cache = dbInterface.get_cache(cacher_key)
     if not (panda_queues_cache and isinstance(panda_queues_cache.data, dict)):
         return
     for nickname, queue_conf in iteritems(panda_queues_cache.data):
         # skip entries lacking panda_resource or with an inconsistent nickname
         try:
             panda_resource = queue_conf['panda_resource']
             assert nickname == queue_conf['nickname']
         except Exception:
             continue
         self[panda_resource] = queue_conf
 def __init__(self, **kwarg):
     """Initialize sweeper defaults: parallelism, cancel policy, held timeout."""
     PluginBase.__init__(self, **kwarg)
     # worker-sweep parallelism
     if not hasattr(self, 'nProcesses'):
         self.nProcesses = 4
     # normalize cancelUnknown to a real boolean (default False)
     self.cancelUnknown = bool(getattr(self, 'cancelUnknown', False))
     # seconds before a held job is given up on
     if not hasattr(self, 'heldTimeout'):
         self.heldTimeout = 3600
# Example #41
 def __init__(self, **kwarg):
     """Initialize SSH settings and try to establish the forwarded connection."""
     tmp_log = core_utils.make_logger(_logger, method_name='__init__')
     PluginBase.__init__(self, **kwarg)
     # fill in missing SSH options with defaults (port 22, otherwise None)
     for attr, default in (('sshUserName', None), ('sshPassword', None),
                           ('privateKey', None), ('passPhrase', None),
                           ('jumpHost', None), ('jumpPort', 22),
                           ('remotePort', 22)):
         setattr(self, attr, getattr(self, attr, default))
     # is connected only if ssh forwarding works
     self.is_connected = False
     try:
         self._get_connection()
     except Exception as e:
         tmp_log.error('failed to get connection ; {0}: {1}'.format(e.__class__.__name__, e))
     else:
         self.is_connected = True
 def __init__(self, **kwarg):
     """Initialize the bulk preparator and create a Globus Transfer Client.

     Credentials (client_id / refresh_token) are fetched from the PanDA
     server through the harvester cache mechanism; on any failure self.tc
     is left as None so callers can detect an unusable client.
     """
     PluginBase.__init__(self, **kwarg)
     # make logger
     tmpLog = self.make_logger(_logger, 'ThreadID={0}'.format(threading.current_thread().ident),
                               method_name='GlobusBulkPreparator __init__ {} ')
     tmpLog.debug('__init__ start')
     self.thread_id = threading.current_thread().ident
     # per-instance id from a class-level counter
     self.id = GlobusBulkPreparator.next_id
     GlobusBulkPreparator.next_id += 1
     with uLock:
         global uID
         self.dummy_transfer_id = '{0}_{1}'.format(dummy_transfer_id_base, 'XXXX')
         uID += 1
         uID %= harvester_config.preparator.nThreads
     # create Globus Transfer Client
     try:
         self.tc = None
         # need to get client_id and refresh_token from PanDA server via harvester cache mechanism
         tmpLog.debug('about to call dbInterface.get_cache(globus_secret)')
         c_data = self.dbInterface.get_cache('globus_secret')
         # identity test instead of the original "not c_data == None"
         if c_data is not None and c_data.data['StatusCode'] == 0:
             tmpLog.debug('Got the globus_secrets from PanDA')
             self.client_id = c_data.data['publicKey']  # client_id
             self.refresh_token = c_data.data['privateKey']  # refresh_token
             tmpStat, self.tc = globus_utils.create_globus_transfer_client(tmpLog, self.client_id, self.refresh_token)
             if not tmpStat:
                 self.tc = None
                 errStr = 'failed to create Globus Transfer Client'
                 tmpLog.error(errStr)
         else:
             self.client_id = None
             self.refresh_token = None
             self.tc = None
             errStr = 'failed to get Globus Client ID and Refresh Token'
             tmpLog.error(errStr)
     except Exception:
         # narrowed from a bare "except:" so SystemExit/KeyboardInterrupt propagate
         core_utils.dump_error_message(tmpLog)
     # tmp debugging
     tmpLog.debug('self.id = {0}'.format(self.id))
     tmpLog.debug('self.dummy_transfer_id = {0}'.format(self.dummy_transfer_id))
     # tmp debugging
     tmpLog.debug('__init__ finish')
Example #43
0
 def __init__(self, **kwarg):
     """Open a redis connection for the FIFO.

     Each connection option is taken from the plugin attribute when set,
     otherwise from harvester_config.fifo; options set in neither place are
     left to the redis client defaults.
     """
     PluginBase.__init__(self, **kwarg)
     conn_options = {}
     for attr_name, opt_key in (('redisHost', 'host'),
                                ('redisPort', 'port'),
                                ('redisDB', 'db'),
                                ('redisPassword', 'password')):
         if hasattr(self, attr_name):
             conn_options[opt_key] = getattr(self, attr_name)
         elif hasattr(harvester_config.fifo, attr_name):
             conn_options[opt_key] = getattr(harvester_config.fifo, attr_name)
     self.qconn = redis.StrictRedis(**conn_options)
     # per-fifo key names used in redis
     self.id_score = '{0}-fifo_id-score'.format(self.titleName)
     self.id_item = '{0}-fifo_id-item'.format(self.titleName)
     self.id_temp = '{0}-fifo_id-temp'.format(self.titleName)
Example #44
0
 def __init__(self, **kwarg):
     """Initialize the SAGA-based plugin and log which adaptor is configured."""
     PluginBase.__init__(self, **kwarg)
     init_log = self.make_logger(baseLogger, method_name='__init__')
     self.pluginFactory = PluginFactory()
     self.queue_config_mapper = QueueConfigMapper()
     init_log.info("[{0}] SAGA adaptor will be used.".format(self.adaptor))
 def __init__(self, **kwarg):
     """Initialize the plugin and keep a queue-configuration mapper for later lookups."""
     PluginBase.__init__(self, **kwarg)
     # mapper used to resolve per-queue configuration
     self.queue_config_mapper = QueueConfigMapper()
Example #46
0
 def __init__(self, **kwarg):
     """Set up a MySQL-backed FIFO.

     Connection parameters come from plugin attributes when present,
     otherwise from harvester_config.fifo (with local defaults for host and
     port only). Prefers the MySQLdb (mysqlclient) driver and falls back to
     mysql-connector-python. Creates the FIFO table on construction; any
     failure is rolled back and re-raised.
     """
     PluginBase.__init__(self, **kwarg)
     self.tableName = '{title}_FIFO'.format(title=self.titleName)
     # DB access attributes: plugin attribute wins, then harvester_config.fifo,
     # then a hard-coded default (host/port only)
     if hasattr(self, 'db_host'):
         db_host = self.db_host
     else:
         try:
             db_host = harvester_config.fifo.db_host
         except AttributeError:
             db_host = '127.0.0.1'
     if hasattr(self, 'db_port'):
         db_port = self.db_port
     else:
         try:
             db_port = harvester_config.fifo.db_port
         except AttributeError:
             db_port = 3306
     if hasattr(self, 'db_user'):
         db_user = self.db_user
     else:
         db_user = harvester_config.fifo.db_user
     if hasattr(self, 'db_password'):
         db_password = self.db_password
     else:
         db_password = harvester_config.fifo.db_password
     if hasattr(self, 'db_schema'):
         db_schema = self.db_schema
     else:
         db_schema = harvester_config.fifo.db_schema
     # get connection, cursor and error types
     try:
         import MySQLdb
         import MySQLdb.cursors
     except ImportError:
         try:
             import mysql.connector
         except ImportError:
             # fixed package name in the hint (was "mysql-connection-python")
             raise Exception('No available MySQL DB API installed. Please pip install mysqlclient or mysql-connector-python')
         else:
             self.con = mysql.connector.connect(user=db_user, passwd=db_password,
                                                 db=db_schema, host=db_host, port=db_port)
             self.cur = self.con.cursor(buffered=True)
             self.OperationalError = mysql.connector.errors.OperationalError
     else:
         # NOTE(review): both overrides currently reproduce the base Cursor
         # behavior; kept as an extension point for result post-processing
         class MyCursor (MySQLdb.cursors.Cursor):
             def fetchone(self):
                 tmpRet = MySQLdb.cursors.Cursor.fetchone(self)
                 if tmpRet is None:
                     return None
                 return tmpRet
             def fetchall(self):
                 tmpRets = MySQLdb.cursors.Cursor.fetchall(self)
                 return tmpRets
         self.con = MySQLdb.connect(user=db_user, passwd=db_password,
                                     db=db_schema, host=db_host, port=db_port,
                                     cursorclass=MyCursor)
         self.cur = self.con.cursor()
         self.OperationalError = MySQLdb.OperationalError
     # create table for fifo
     try:
         self._make_table()
         # self._make_index()
         self.commit()
     except Exception:
         # roll back partial DDL and re-raise with the original traceback
         self.rollback()
         raise
Example #47
0
 def __init__(self, **kwarg):
     """Initialize the Titan utilities plugin and log the start-up."""
     PluginBase.__init__(self, **kwarg)
     init_log = self.make_logger(baseLogger, method_name='__init__')
     init_log.info("Titan utils initiated")
Example #48
0
    def __init__(self, **kwarg):
        """Initialize the aCT submitter and open a connection to the aCT PanDA DB."""
        PluginBase.__init__(self, **kwarg)

        # Set up aCT DB connection
        self.log = core_utils.make_logger(baseLogger, 'aCT submitter', method_name='__init__')
        self.actDB = aCTDBPanda(self.log)
Example #49
0
 def __init__(self, **kwarg):
     """Plain pass-through constructor; all setup is delegated to PluginBase."""
     PluginBase.__init__(self, **kwarg)
 def __init__(self, **kwarg):
     """Test constructor with a fixed log base URL."""
     # NOTE(review): assigned before PluginBase.__init__ — presumably so a
     # value in kwarg can still override it; confirm against PluginBase
     self.logBaseURL = 'http://localhost/test'
     PluginBase.__init__(self, **kwarg)
 def __init__(self, **kwarg):
     """Initialize the throttler with OR logic by default and a DB proxy."""
     # logic type : AND: throttled if all rules are satisfied, OR: throttled if one rule is satisfied
     self.logicType = 'OR'
     PluginBase.__init__(self, **kwarg)
     # proxy for database access
     self.dbProxy = DBProxy()
 def __init__(self, **kwarg):
     """Initialize the condor submitter, defaulting every attribute the
     configuration did not provide."""
     self.logBaseURL = None
     PluginBase.__init__(self, **kwarg)
     # number of processes; force a sane minimum of 1
     n_processes = getattr(self, 'nProcesses', 1)
     if (not n_processes) or (n_processes < 1):
         n_processes = 1
     self.nProcesses = n_processes
     # condor log directory
     if not hasattr(self, 'logDir'):
         self.logDir = os.getenv('TMPDIR') or '/tmp'
     # x509 proxy
     if not hasattr(self, 'x509UserProxy'):
         self.x509UserProxy = os.getenv('X509_USER_PROXY')
     # ATLAS AGIS; normalize to a real bool
     self.useAtlasAGIS = bool(getattr(self, 'useAtlasAGIS', False))
     # ATLAS Grid CE requires AGIS, so enabling it also enables AGIS
     self.useAtlasGridCE = bool(getattr(self, 'useAtlasGridCE', False))
     self.useAtlasAGIS = self.useAtlasAGIS or self.useAtlasGridCE
     # simple defaults for the remaining optional attributes:
     #   executableFile          : executable file for submission
     #   CEtemplateDir           : sdf template directories of CEs
     #   condorSchedd/condorPool : remote condor schedd and pool name (collector)
     #   useSpool                : condor spool mechanism; if False, need shared FS across remote schedd
     #   minBulkToRamdomizedSchedd ("Ramdomized" spelling kept for config compatibility):
     #       fewer workers than this are bulkily submitted in only one schedd
     for attr_name, default_value in (('executableFile', None),
                                      ('CEtemplateDir', ''),
                                      ('condorSchedd', None),
                                      ('condorPool', None),
                                      ('useSpool', False),
                                      ('minBulkToRamdomizedSchedd', 20)):
         setattr(self, attr_name, getattr(self, attr_name, default_value))
     # record of information of CE statistics
     self.ceStatsLock = threading.Lock()
     self.ceStats = dict()
    def __init__(self, **kwarg):
        """Test constructor: fixed log base URL plus a queue-configuration mapper."""
        # NOTE(review): assigned before PluginBase.__init__ — presumably so a
        # value in kwarg can still override it; confirm against PluginBase
        self.logBaseURL = 'http://localhost/test'
        PluginBase.__init__(self, **kwarg)

        self.queue_config_mapper = QueueConfigMapper()
Example #54
0
 def __init__(self, **kwarg):
     """Initialize the SAGA-based plugin and log which adaptor is configured."""
     PluginBase.__init__(self, **kwarg)
     init_log = core_utils.make_logger(baseLogger, method_name='__init__')
     init_log.info("[{0}] SAGA adaptor will be used".format(self.adaptor))
Example #55
0
    def __init__(self, **kwarg):
        """Initialize the aCT sweeper and open a connection to the aCT PanDA DB."""
        PluginBase.__init__(self, **kwarg)

        # logger tagged with the plugin name, then the aCT DB handle
        self.log = core_utils.make_logger(baseLogger, 'aCT sweeper', method_name='__init__')
        self.actDB = aCTDBPanda(self.log)
 def __init__(self, **kwarg):
     """Initialize the VM plugin and its OpenStack client.

     nProcesses defaults to 4 only when the configuration did not supply a
     value — consistent with the other plugins — instead of unconditionally
     overwriting a configured value as before.
     """
     PluginBase.__init__(self, **kwarg)
     if not hasattr(self, 'nProcesses'):
         self.nProcesses = 4
     self.vm_client = OS_SimpleClient(auth_config_json_file=self.authConfigFile)
 def __init__(self, **kwarg):
     self.n_threads = 3
     PluginBase.__init__(self, **kwarg)
     if self.n_threads < 1:
         self.n_threads = 1