def bootstrap():
    """Flag the internal services as enabled and expose ``reactivate`` in the GPI."""
    global servicesEnabled
    servicesEnabled = True

    # Make the re-activation hook callable from a user session.
    from Ganga.Runtime.GPIexport import exportToGPI
    exportToGPI('reactivate', enableInternalServices, 'Functions')
def insertClassIntoGPI(class_):
    """
    Insert a given class object into the GPI

    Args:
        class_ (class): This is the class object to insert into the GPI
    """
    # Resolve the public name once and reuse it for both registrations.
    class_name = _getName(class_)
    exportToGPI(class_name, class_, "Objects")
    allPlugins.add(class_, class_._category, class_name)
    # Drop the plugin lookup cache so the newly added class can be found.
    allPlugins._prev_found = {}
def bootstrap(reg, interactive_session):
    """
    Create local subsystems. In the future this procedure should be enhanced
    to connect to remote subsystems.
    FIXME: this procedure should be moved to the Runtime package.

    This function will change the default value of autostart of the monitoring,
    depending if the session is interactive or batch. The autostart value may
    be overriden in the config file, so warn if it differs from the default.

    Args:
        reg: the job registry to be monitored
        interactive_session (bool): True for an interactive (CLI) session,
            False for a batch/script session
    """
    from Ganga.Core.MonitoringComponent.Local_GangaMC_Service import JobRegistry_Monitor, config
    from Ganga.Utility.logging import getLogger
    logger = getLogger()

    from Ganga.Core.GangaThread import GangaThreadPool

    # start the internal services coordinator
    from Ganga.Core.InternalServices import Coordinator
    Coordinator.bootstrap()

    # backend-specific setup (e.g. Remote: setup any remote ssh pipes)
    # for j in reg:
    #     if hasattr(j,'status') and j.status in ['submitted','running']:
    #         if hasattr(j,'backend'):  # protect: EmptyGangaObject does not have backend either
    #             if hasattr(j.backend,'setup'):  # protect: EmptyGangaObject does not have setup() method
    #                 j.backend.setup()

    start_jobregistry_monitor(reg)

    # register the MC shutdown hook
    change_atexitPolicy(interactive_session)

    # export to GPI
    from Ganga.Runtime.GPIexport import exportToGPI
    exportToGPI('runMonitoring', monitoring_component.runMonitoring, 'Functions')

    # Interactive sessions autostart monitoring by default; batch sessions do not.
    autostart_default = interactive_session
    config.overrideDefaultValue('autostart', bool(autostart_default))

    # BUGFIX: compare by value (!=) rather than identity (is not) -- the value
    # read back from the config is not guaranteed to be the True/False singleton.
    if config['autostart'] != bool(autostart_default):
        msg = 'monitoring loop %s (the default setting for %s session is %s)'
        val = {True: ('enabled', 'batch', 'disabled'),
               False: ('disabled', 'interactive', 'enabled')}
        logger.warning(msg % val[config['autostart']])

    if config['autostart']:
        monitoring_component.enableMonitoring()
def bootstrap(reg, interactive_session):
    """
    Create local subsystems. In the future this procedure should be enhanced
    to connect to remote subsystems.
    FIXME: this procedure should be moved to the Runtime package.

    This function will change the default value of autostart of the monitoring,
    depending if the session is interactive or batch. The autostart value may
    be overriden in the config file, so warn if it differs from the default.

    Args:
        reg: the job registry to be monitored
        interactive_session (bool): True for an interactive (CLI) session,
            False for a batch/script session
    """
    from Ganga.Core.MonitoringComponent.Local_GangaMC_Service import JobRegistry_Monitor, config
    from Ganga.Utility.logging import getLogger
    logger = getLogger()

    from Ganga.Core.GangaThread import GangaThreadPool

    # start the internal services coordinator
    from Ganga.Core.InternalServices import Coordinator
    Coordinator.bootstrap()

    # backend-specific setup (e.g. Remote: setup any remote ssh pipes)
    # for j in reg:
    #     if hasattr(j,'status') and j.status in ['submitted','running']:
    #         if hasattr(j,'backend'):  # protect: EmptyGangaObject does not have backend either
    #             if hasattr(j.backend,'setup'):  # protect: EmptyGangaObject does not have setup() method
    #                 j.backend.setup()

    start_jobregistry_monitor(reg)

    # register the MC shutdown hook
    change_atexitPolicy(interactive_session)

    # export to GPI
    from Ganga.Runtime.GPIexport import exportToGPI
    exportToGPI(
        'runMonitoring', monitoring_component.runMonitoring, 'Functions')

    # Interactive sessions autostart monitoring by default; batch sessions do not.
    autostart_default = interactive_session
    config.overrideDefaultValue('autostart', bool(autostart_default))

    # BUGFIX: compare by value (!=) rather than identity (is not) -- the value
    # read back from the config is not guaranteed to be the True/False singleton.
    if config['autostart'] != bool(autostart_default):
        msg = 'monitoring loop %s (the default setting for %s session is %s)'
        val = {True: ('enabled', 'batch', 'disabled'),
               False: ('disabled', 'interactive', 'enabled')}
        logger.warning(msg % val[config['autostart']])

    if config['autostart']:
        monitoring_component.enableMonitoring()
def loadPlugins(c):
    """Create the graphics plotter object and export it to the GPI as ``plotter``.

    Args:
        c: configuration object (unused; part of the runtime-module plugin interface)
    """
    # CLEANUP: the unused 'os' import and the commented-out module-attribute
    # variant of the plotter export were removed.
    import sys
    from Ganga.Utility.logging import getLogger
    logger = getLogger()
    logger.info('You are now using Python %s', sys.version.split()[0])

    import GangaPlotter.Plotter
    from Plotter.GangaPlotter import GangaPlotter
    plotter = GangaPlotter()

    from Ganga.Runtime.GPIexport import exportToGPI
    exportToGPI('plotter', plotter, 'Objects', 'graphics plotter')
def startUpQueues():
    """Create the global ThreadPoolQueueMonitor singleton, export it to the GPI as 'queues' and register its shutdown hook."""
    from Ganga.Utility.logging import getLogger
    logger = getLogger()
    global _global_queues
    if _global_queues is None:
        logger.debug("Starting Queues")
        # start queues
        from Ganga.Runtime.GPIexport import exportToGPI
        from Ganga.Core.GangaThread.WorkerThreads.ThreadPoolQueueMonitor import ThreadPoolQueueMonitor
        _global_queues = ThreadPoolQueueMonitor()
        exportToGPI('queues', _global_queues, 'Objects')
        import atexit
        # NOTE(review): a (priority, callable) tuple is registered here, which
        # the stdlib atexit cannot invoke -- presumably Ganga replaces atexit's
        # handler list with a priority-sorted one; confirm before changing.
        atexit.register((-10., shutDownQueues))
    else:
        # Re-entry guard: the singleton already exists.
        logger.error("Cannot Start queues if they've already started")
def bootstrap():
    """Expose the internal-service control functions in the GPI and flag the services as enabled."""
    # BUGFIX: without this declaration the assignment at the end of the
    # function bound a function-local name, so the module-level
    # 'servicesEnabled' flag was never actually updated.
    global servicesEnabled

    # export to GPI
    from Ganga.Runtime.GPIexport import exportToGPI
    exportToGPI('reactivate', enableInternalServices, 'Functions')
    exportToGPI('disableMonitoring', disableMonitoringService, 'Functions')
    exportToGPI('enableMonitoring', enableMonitoringService, 'Functions')
    exportToGPI('disableServices', disableInternalServices, 'Functions')

    servicesEnabled = True
def bootstrap():
    """Expose the internal-service control functions in the GPI and flag the services as enabled."""
    # BUGFIX: without this declaration the assignment at the end of the
    # function bound a function-local name, so the module-level
    # 'servicesEnabled' flag was never actually updated.
    global servicesEnabled

    # export to GPI
    from Ganga.Runtime.GPIexport import exportToGPI
    exportToGPI("reactivate", enableInternalServices, "Functions")
    exportToGPI("disableMonitoring", disableMonitoringService, "Functions")
    exportToGPI("enableMonitoring", enableMonitoringService, "Functions")
    exportToGPI("disableServices", disableInternalServices, "Functions")

    servicesEnabled = True
def startUpRegistries():
    """Bootstrap the repositories and export them to the GPI, together with the job tree and the share reference."""
    from Ganga.Runtime.GPIexport import exportToGPI
    from Ganga.GPIDev.Lib.JobTree import TreeError

    # Every (name, proxy, description) triple returned by bootstrap() becomes
    # a repository proxy visible in the GPI.
    for proxy_name, proxy_obj, description in bootstrap():
        exportToGPI(proxy_name, proxy_obj, 'Objects', description)

    # Logical tree view of the jobs registry.
    from Ganga.Core.GangaRepository import getRegistry
    jobtree = getRegistry("jobs").getJobTree()
    exportToGPI('jobtree', jobtree, 'Objects', 'Logical tree view of the jobs')
    exportToGPI('TreeError', TreeError, 'Exceptions')

    # Shared-directory resource tracking.
    shareref = getRegistry("prep").getShareRef()
    exportToGPI('shareref', shareref, 'Objects', 'Mechanism for tracking use of shared directory resources')
else: logger.error("LFN doesn't match namePattern for file: %s" % str(self.namePattern)) return False elif len(self.subfiles) > 0 and regex.search(self.namePattern) is not None: return True else: logger.error("Failed to Match file:\n%s" % str(self)) return False @staticmethod def diracLFNBase(): """ Compute a sensible default LFN base name If ``DiracLFNBase`` has been defined, use that. Otherwise, construct one from the user name and the user VO """ if configDirac["DiracLFNBase"]: return configDirac["DiracLFNBase"] return "/{0}/user/{1}/{2}".format(configDirac["userVO"], config["user"][0], config["user"]) # add DiracFile objects to the configuration scope (i.e. it will be # possible to write instatiate DiracFile() objects via config file) import Ganga.Utility.Config Ganga.Utility.Config.config_scope["DiracFile"] = DiracFile from Ganga.Runtime.GPIexport import exportToGPI exportToGPI("GangaDirac", GangaList, "Classes")
""" import Ganga.Utility.logging from Ganga.GPIDev.Base.Proxy import addProxy logger = Ganga.Utility.logging.getLogger() try: from GangaLHCb.Lib.Backends.Bookkeeping import Bookkeeping from Ganga.GPI import LHCbDataset except ImportError: logger.warning("Could not start Bookkeeping Browser") return None bkk = Bookkeeping() return addProxy(bkk.browse(gui)) exportToGPI("browseBK", browseBK, "Functions") def fixBKQueryInBox(newCategory="query"): import os from Ganga.Utility.Config import getConfig def _filt(line): return 'class name="BKQuery"' in line gangadir = getConfig("Configuration")["gangadir"] logger.info("found gangadir = " + gangadir) for root, dirs, files in os.walk(gangadir): if "data" in files and "box" in root and not "box." in root: path = os.path.join(root, "data") logger.info("looking at " + path)
jobs((10,2)) : get subjobs number 2 of job 10 if exist or raise exception. jobs('10.2')) : same as above """ return stripProxy(self).__call__(x) def __getslice__(self, i1, i2): """ Get a slice. Examples: jobs[2:] : get first two jobs, jobs[-10:] : get last 10 jobs. """ return _wrap(stripProxy(self).__getslice__(i1, i2)) def __getitem__(self, x): """ Get a job by positional index. Examples: jobs[-1] : get last job, jobs[0] : get first job, jobs[1] : get second job. """ return _wrap(stripProxy(self).__getitem__(_unwrap(x))) def jobSlice(joblist): """create a 'JobSlice' from a list of jobs example: jobSlice([j for j in jobs if j.name.startswith("T1:")])""" this_slice = JobRegistrySlice("manual slice") this_slice.objects = oDict([(j.fqid, _unwrap(j)) for j in joblist]) return _wrap(this_slice) # , "Create a job slice from a job list") exportToGPI("jobSlice", jobSlice, "Functions")
# this will simply return 87 diracAPI(\'print 87\') # this will return the status of job 66 # note a Dirac() object is already provided set up as \'dirac\' diracAPI(\'print Dirac().status([66])\') diracAPI(\'print dirac.status([66])\') # or can achieve the same using command defined and included from # getConfig('DIRAC')['DiracCommandFiles'] diracAPI(\'status([66])\') ''' return execute(cmd, timeout=timeout) exportToGPI('diracAPI', diracAPI, 'Functions') #\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/# def diracAPI_interactive(connection_attempts=5): ''' Run an interactive server within the DIRAC environment. ''' import os import sys import time import inspect import traceback from GangaDirac.Lib.Server.InspectionClient import runClient serverpath = os.path.join(os.path.dirname(inspect.getsourcefile(runClient)), 'InspectionServer.py')
if not cred.is_valid(): cred.create() except KeyError: credential_store.create(DiracProxy()) @classmethod def create(cls): """ This is a wrapper for:: credential_store.create(DiracProxy()) """ cls.renew() @classmethod def destroy(cls): """ This is a wrapper for:: credential_store[DiracProxy()].destroy() """ from Ganga.GPI import credential_store, DiracProxy try: cred = credential_store[DiracProxy()] cred.destroy() except KeyError: pass exportToGPI('gridProxy', gridProxy, 'Functions')
jobs((10,2)) : get subjobs number 2 of job 10 if exist or raise exception. jobs('10.2')) : same as above """ return _wrap(stripProxy(self).__call__(x)) def __getslice__(self, i1, i2): """ Get a slice. Examples: jobs[2:] : get first two jobs, jobs[-10:] : get last 10 jobs. """ return _wrap(stripProxy(self).__getslice__(i1, i2)) def __getitem__(self, x): """ Get a job by positional index. Examples: jobs[-1] : get last job, jobs[0] : get first job, jobs[1] : get second job. """ return _wrap(stripProxy(self).__getitem__(_unwrap(x))) def jobSlice(joblist): """create a 'JobSlice' from a list of jobs example: jobSlice([j for j in jobs if j.name.startswith("T1:")])""" slice = JobRegistrySlice("manual slice") slice.objects = oDict([(j.fqid, _unwrap(j)) for j in joblist]) return _wrap(slice) # , "Create a job slice from a job list") exportToGPI("jobSlice", jobSlice, "Functions")
path = tempfile.mkdtemp(prefix='GANGA_') logger.info( 'Bender application will be prepared in the temporary directory %s' % path) the_path = prepare_cmake_app(project, version, path) # scripts = kwarsg.pop('scripts', []) if script: scripts.append(script) # from Ganga.GPI import OstapRun as _OR ## black magic return _OR(scripts=scripts, directory=the_path, **kwargs) ## export it! from Ganga.Runtime.GPIexport import exportToGPI exportToGPI('prepareBender', prepareBender, 'Functions') exportToGPI('prepareBenderRun', prepareBenderRun, 'Functions') exportToGPI('prepareOstapRun', prepareOstapRun, 'Functions') ## for app in ('BenderModule', 'BenderRun', 'OstapRun'): allHandlers.add(app, 'Dirac', GaudiExecDiracRTHandler) for backend in ("Local", "Interactive", "LSF", "PBS", "SGE", "Condor"): allHandlers.add(app, backend, GaudiExecRTHandler) # ============================================================================= # The END # =============================================================================
try: os.kill(pid, 0) except OSError: os.unlink(pidfile) return None # ok! return pid getpid = classmethod(getpid) def putpid(cls): '''Store current PID to file; should only be called in daemon process.''' if LGI.getpid() is not None: LGI.pilot.log.warning('daemon already running, not overwriting pid file') return False f = open(Config.getConfig('LGI')['PidFile'], 'w') f.write(str(os.getpid())) f.close() return True putpid = classmethod(putpid) def delpid(cls): '''Remove current PID file; should only be called at end of daemon process.''' os.unlink(Config.getConfig('LGI')['PidFile']) delpid = classmethod(delpid) # export to GPI from Ganga.Runtime.GPIexport import exportToGPI exportToGPI('LGI', LGI, 'Objects')
- use another project ('Analysis' is minimal one...) >>> j = Job ( application = prepareOstapRun ( project = 'Analysis' , version = 'v18r0' , script = 'the_path/the_script.py' , commands = [] ) ) """ if use_tmp or not path : path = tempfile.mkdtemp ( prefix = 'GANGA_' ) logger.info ( 'Bender application will be prepared in the temporary directory %s' % path ) the_path = prepare_cmake_app ( project , version , path ) # scripts = kwarsg.pop('scripts', [] ) if script : scripts.append ( script ) # from Ganga.GPI import OstapRun as _OR ## black magic return _OR ( scripts = scripts , directory = the_path , **kwargs ) ## export it! from Ganga.Runtime.GPIexport import exportToGPI exportToGPI ( 'prepareBender' , prepareBender , 'Functions' ) exportToGPI ( 'prepareBenderRun' , prepareBenderRun , 'Functions' ) exportToGPI ( 'prepareOstapRun' , prepareOstapRun , 'Functions' ) ## for app in ( 'BenderModule' , 'BenderRun' , 'OstapRun' ) : allHandlers.add ( app , 'Dirac', GaudiExecDiracRTHandler) for backend in ("Local","Interactive","LSF","PBS","SGE","Condor"): allHandlers.add ( app , backend, GaudiExecRTHandler) # ============================================================================= # The END # =============================================================================
out.write(str_val) return if (maxLen != -1) and (self_len > maxLen): out.write(decorateListEntries(self_len, getName(type(self[0])))) return else: summary_print(self, out) return out.write(str(self._list)) return def toString(self): """Returns a simple str of the _list.""" returnable_str = "[" for element in self._list: if isType(element, GangaObject): returnable_str += repr(stripProxy(element)) else: returnable_str += "'" returnable_str += str(stripProxy(element)) returnable_str += "'" returnable_str += ", " returnable_str += "]" return returnable_str from Ganga.Runtime.GPIexport import exportToGPI exportToGPI('GangaList', GangaList, 'Classes')
if thisSE not in SEs: SEs.append(thisSE) myURLs = [] # If an SE is specified, move it to be the first element in the list to be processed. if defaultSE != '': if defaultSE in SEs: SEs.remove(defaultSE) SEs.insert(0, defaultSE) else: logger.warning( 'No replica at specified SE, here is a URL for another replica' ) remainingLFNs = list(lfnList) # Loop over the possible SEs and get the URLs of the files stored there. # Remove the successfully found ones from the list and move on to the next SE. for SE in SEs: lfns = remainingLFNs thisSEFiles = execute('getAccessURL(%s, "%s", %s)' % (lfns, SE, protocol))['Successful'] for lfn in thisSEFiles.keys(): myURLs.append(thisSEFiles[lfn]) remainingLFNs.remove(lfn) # If we gotten to the end of the list then break if not remainingLFNs: break return myURLs from Ganga.Runtime.GPIexport import exportToGPI exportToGPI('getAccessURLs', getAccessURLs, 'Functions')
# them in the jobs inputdata field, ready for submission j.inputdata=browseBK() """ import Ganga.Utility.logging from Ganga.GPIDev.Base.Proxy import addProxy logger = Ganga.Utility.logging.getLogger() try: from GangaLHCb.Lib.DIRAC.Bookkeeping import Bookkeeping from Ganga.GPI import LHCbDataset except ImportError: logger.warning('Could not start Bookkeeping Browser') return None bkk = Bookkeeping() return addProxy(bkk.browse(gui)) exportToGPI('browseBK',browseBK,'Functions') def diracAPI(cmd,timeout=None): '''Execute DIRAC API commands from w/in Ganga. The value of local variable \"result\" will be returned, e.g.: # this will simply return 87 diracAPI(\'result = 87\') # this will return the status of job 66 diracAPI(\'result = Dirac().status(66)\') If \"result\" is not set, then the commands are still executed but no value is returned. '''
# use a dummy file to keep the parser happy if len(optsfiles) == 0: raise GangaException("Need a file to parse to call this method") try: parser = PythonOptsCmakeParser(optsfiles, app) except Exception as err: msg = 'Unable to parse the job options. Please check options files and extraopts.' logger.error("PythonOptsCmakeParserError:\n%s" % str(err)) raise ApplicationConfigurationError(None, msg) return parser.get_input_data() exportToGPI('getGaudiExecInputData', getGaudiExecInputData, 'Functions') def prepare_cmake_app(myApp, myVer, myPath='$HOME/cmtuser', myGetpack=None): """ Short helper function for setting up minimal application environments on disk for job submission Args: myApp (str): This is the name of the app to pass to lb-dev myVer (str): This is the version of 'myApp' to pass tp lb-dev myPath (str): This is where lb-dev will be run myGepPack (str): This is a getpack which will be run once the lb-dev has executed """ full_path = expandfilename(myPath, True) if not path.exists(full_path): makedirs(full_path) chdir(full_path)
diracAPI(\'print 87\') # this will return the status of job 66 # note a Dirac() object is already provided set up as \'dirac\' diracAPI(\'print Dirac().status([66])\') diracAPI(\'print dirac.status([66])\') # or can achieve the same using command defined and included from # getConfig('DIRAC')['DiracCommandFiles'] diracAPI(\'status([66])\') ''' return execute(cmd, timeout=timeout) exportToGPI('diracAPI', diracAPI, 'Functions') #\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/# def diracAPI_interactive(connection_attempts=5): ''' Run an interactive server within the DIRAC environment. ''' import os import sys import time import inspect import traceback from GangaDirac.Lib.Server.InspectionClient import runClient serverpath = os.path.join(
jobs((10,2)) : get subjobs number 2 of job 10 if exist or raise exception. jobs('10.2')) : same as above """ return _wrap(self._impl.__call__(x)) def __getslice__(self, i1,i2): """ Get a slice. Examples: jobs[2:] : get first two jobs, jobs[-10:] : get last 10 jobs. """ return _wrap(self._impl.__getslice__(i1,i2)) def __getitem__(self,x): """ Get a job by positional index. Examples: jobs[-1] : get last job, jobs[0] : get first job, jobs[1] : get second job. """ return _wrap(self._impl.__getitem__(_unwrap(x))) from Ganga.Utility.external.ordereddict import oDict def jobSlice(joblist): """create a 'JobSlice' from a list of jobs example: jobSlice([j for j in jobs if j.name.startswith("T1:")])""" slice = JobRegistrySlice("manual slice") slice.objects = oDict([(j.fqid, _unwrap(j)) for j in joblist]) return _wrap(slice) from Ganga.Runtime.GPIexport import exportToGPI exportToGPI("jobSlice", jobSlice, "Functions")#, "Create a job slice from a job list")
def bootstrap(reg, interactive_session):
    """
    Create local subsystems. In the future this procedure should be enhanced
    to connect to remote subsystems.
    FIXME: this procedure should be moved to the Runtime package.

    This function will change the default value of autostart of the monitoring,
    depending if the session is interactive or batch. The autostart value may
    be overriden in the config file, so warn if it differs from the default.

    Args:
        reg: the job registry to be monitored
        interactive_session (bool): True for an interactive (CLI) session,
            False for a batch/script session
    """
    from Ganga.Core.MonitoringComponent.Local_GangaMC_Service import JobRegistry_Monitor, config

    # Register the forced-shutdown policy options before anything reads them.
    config.addOption('forced_shutdown_policy', 'session_type', 'If there are remaining background activities at exit such as monitoring, output download Ganga will attempt to wait for the activities to complete. You may select if a user is prompted to answer if he wants to force shutdown ("interactive") or if the system waits on a timeout without questions ("timeout"). The default is "session_type" which will do interactive shutdown for CLI and timeout for scripts.')
    config.addOption('forced_shutdown_timeout', 60, "Timeout in seconds for forced Ganga shutdown in batch mode.")
    config.addOption('forced_shutdown_prompt_time', 10, "User will get the prompt every N seconds, as specified by this parameter.")
    config.addOption('forced_shutdown_first_prompt_time', 5, "User will get the FIRST prompt after N seconds, as specified by this parameter. This parameter also defines the time that Ganga will wait before shutting down, if there are only non-critical threads alive, in both interactive and batch mode.")

    from Ganga.Utility.logging import getLogger
    logger = getLogger()

    from Ganga.Core.GangaThread import GangaThreadPool

    # start the internal services coordinator
    from Ganga.Core.InternalServices import Coordinator
    Coordinator.bootstrap()

    # backend-specific setup (e.g. Remote: setup any remote ssh pipes)
    # for j in reg:
    #     if hasattr(j,'status') and j.status in ['submitted','running']:
    #         if hasattr(j,'backend'):  # protect: EmptyGangaObject does not have backend either
    #             if hasattr(j.backend,'setup'):  # protect: EmptyGangaObject does not have setup() method
    #                 j.backend.setup()

    # start the monitoring loop
    global monitoring_component
    monitoring_component = JobRegistry_Monitor(reg)
    monitoring_component.start()

    # register the MC shutdown hook
    change_atexitPolicy(interactive_session)

    # export to GPI
    from Ganga.Runtime.GPIexport import exportToGPI
    exportToGPI(
        'runMonitoring', monitoring_component.runMonitoring, 'Functions')

    # Interactive sessions autostart monitoring by default; batch sessions do not.
    autostart_default = interactive_session
    config.overrideDefaultValue('autostart', bool(autostart_default))

    # BUGFIX: compare by value (!=) rather than identity (is not) -- the value
    # read back from the config is not guaranteed to be the True/False singleton.
    if config['autostart'] != bool(autostart_default):
        msg = 'monitoring loop %s (the default setting for %s session is %s)'
        val = {True: ('enabled', 'batch', 'disabled'),
               False: ('disabled', 'interactive', 'enabled')}
        logger.warning(msg % val[config['autostart']])

    if config['autostart']:
        monitoring_component.enableMonitoring()
# Get the SEs SEs = [] for lf in reps['Successful']: for thisSE in reps['Successful'][lf].keys(): if thisSE not in SEs: SEs.append(thisSE) myURLs = [] # If an SE is specified, move it to be the first element in the list to be processed. if defaultSE != '': if defaultSE in SEs: SEs.remove(defaultSE) SEs.insert(0, defaultSE) else: logger.warning('No replica at specified SE, here is a URL for another replica') remainingLFNs = list(lfnList) # Loop over the possible SEs and get the URLs of the files stored there. # Remove the successfully found ones from the list and move on to the next SE. for SE in SEs: lfns = remainingLFNs thisSEFiles = execute('getAccessURL(%s, "%s", %s)' % (lfns, SE, protocol), cred_req=credential_requirements)['Successful'] for lfn in thisSEFiles.keys(): myURLs.append(thisSEFiles[lfn]) remainingLFNs.remove(lfn) # If we gotten to the end of the list then break if not remainingLFNs: break return myURLs from Ganga.Runtime.GPIexport import exportToGPI exportToGPI('getAccessURLs', getAccessURLs, 'Functions')
optsfiles = [optsfiles] # use a dummy file to keep the parser happy if len(optsfiles) == 0: raise GangaException("Need a file to parse to call this method") try: parser = PythonOptsCmakeParser(optsfiles, app) except Exception as err: msg = 'Unable to parse the job options. Please check options files and extraopts.' logger.error("PythonOptsCmakeParserError:\n%s" % str(err)) raise ApplicationConfigurationError(None, msg) return parser.get_input_data() exportToGPI('getGaudiExecInputData', getGaudiExecInputData, 'Functions') def prepare_cmake_app(myApp, myVer, myPath='$HOME/cmtuser', myGetpack=None): """ Short helper function for setting up minimal application environments on disk for job submission Args: myApp (str): This is the name of the app to pass to lb-dev myVer (str): This is the version of 'myApp' to pass tp lb-dev myPath (str): This is where lb-dev will be run myGepPack (str): This is a getpack which will be run once the lb-dev has executed """ full_path = expandfilename(myPath, True) if not path.exists(full_path): makedirs(full_path) chdir(full_path) _exec_cmd('lb-dev %s %s' % (myApp, myVer), full_path)
if (maxLen != -1) and (self_len > maxLen): out.write( decorateListEntries(self_len, type(self[0]).__name__)) return else: summary_print(self, out) return out.write(str(self._list)) return def toString(self): """Returns a simple str of the _list.""" returnable_str = "[" for element in self._list: if isType( element, GangaObject): returnable_str += repr(stripProxy(element)) else: returnable_str += "'" returnable_str += str(stripProxy(element)) returnable_str += "'" returnable_str += ", " returnable_str += "]" return returnable_str from Ganga.Runtime.GPIexport import exportToGPI exportToGPI('GangaList', GangaList, 'Classes')
def getEnvironment(c):
    """Run the package standard setup and return an (empty) environment dict.

    Args:
        c: configuration object (unused; part of the runtime-module interface)
    """
    import PACKAGE
    PACKAGE.standardSetup()
    return {}


def loadPlugins(c):
    """No plugins to load for the tutorial runtime module."""
    pass


# Export the tutorial helper jobs and the tutorial directory to the GPI.
from GangaTutorial.Lib.primes.primes import check_prime_job, split_prime_job
from Ganga.Runtime.GPIexport import exportToGPI
from Ganga.Utility.logging import getLogger

logger = getLogger(modulename=True)

exportToGPI("check_prime_job", check_prime_job, "Functions")
exportToGPI("split_prime_job", split_prime_job, "Functions")

import os, GangaTutorial
TUTDIR = os.path.dirname(GangaTutorial.__file__)
exportToGPI("TUTDIR", TUTDIR, "Objects")
# Keep the module namespace clean after computing TUTDIR.
del os, GangaTutorial

logger.debug("*** Ganga Tutorial Loaded OK ***")
# them in the jobs inputdata field, ready for submission j.inputdata=browseBK() """ import Ganga.Utility.logging from Ganga.GPIDev.Base.Proxy import addProxy logger = Ganga.Utility.logging.getLogger() try: from GangaLHCb.Lib.Backends.Bookkeeping import Bookkeeping from GangaLHCb.Lib.LHCbDataset.LHCbDataset import LHCbDataset except ImportError: logger.warning('Could not start Bookkeeping Browser') return None bkk = Bookkeeping() return addProxy(bkk.browse(gui)) exportToGPI('browseBK', browseBK, 'Functions') def fixBKQueryInBox(newCategory='query'): import os from Ganga.Utility.Config import getConfig def _filt(line): return 'class name=\"BKQuery\"' in line gangadir = getConfig('Configuration')['gangadir'] logger.info('found gangadir = ' + gangadir) for root, dirs, files in os.walk(gangadir): if 'data' in files and 'box' in root and not 'box.' in root: path = os.path.join(root, 'data') logger.info("looking at " + path)
def getEnvironment(c):
    """Run the package standard setup and return an (empty) environment dict.

    Args:
        c: configuration object (unused; part of the runtime-module interface)
    """
    import PACKAGE
    PACKAGE.standardSetup()
    return {}


def loadPlugins(c):
    """No plugins to load for the tutorial runtime module."""
    pass


# Export the tutorial helper jobs and the tutorial directory to the GPI.
from GangaTutorial.Lib.primes.primes import check_prime_job, split_prime_job
from Ganga.Runtime.GPIexport import exportToGPI
from Ganga.Utility.logging import getLogger

logger = getLogger(modulename=True)

for _name, _obj in (("check_prime_job", check_prime_job),
                    ("split_prime_job", split_prime_job)):
    exportToGPI(_name, _obj, "Functions")

import os, GangaTutorial
TUTDIR = os.path.dirname(GangaTutorial.__file__)
exportToGPI("TUTDIR", TUTDIR, "Objects")
# Keep the module namespace clean after computing TUTDIR.
del os, GangaTutorial

logger.debug("*** Ganga Tutorial Loaded OK ***")
logger.error("LFN doesn't match namePattern for file: %s" % str(self.namePattern)) return False elif len(self.subfiles) > 0 and regex.search( self.namePattern) is not None: return True else: logger.error("Failed to Match file:\n%s" % str(self)) return False @staticmethod def diracLFNBase(credential_requirements): """ Compute a sensible default LFN base name If ``DiracLFNBase`` has been defined, use that. Otherwise, construct one from the user name and the user VO Args: credential_requirements (DiracProxy): This is the credential which governs how we should format the path """ if configDirac['DiracLFNBase']: return configDirac['DiracLFNBase'] user = DiracProxyInfo(credential_requirements).username return '/{0}/user/{1}/{2}'.format(configDirac['userVO'], user[0], user) # add DiracFile objects to the configuration scope (i.e. it will be # possible to write instatiate DiracFile() objects via config file) Ganga.Utility.Config.config_scope['DiracFile'] = DiracFile exportToGPI('GangaDirac', GangaList, 'Classes')
from Ganga.Runtime.GPIexport import exportToGPI
# Export a sentinel value so a session/test can check this library was loaded.
exportToGPI('lib_tested_correctly', 1, 'Objects')
logger.info("Cleaning Shared folders in: %s" % share_path) logger.info("This may take a few minutes if you're running this for the first time after 6.1.23, feel free to go grab a tea/coffee") for item in os.listdir(share_path): this_dir = os.path.join(share_path, item) if os.path.isdir(this_dir): # NB we do need to explicitly test the length of the returned value here # Checking is __MUCH__ faster than trying and failing to remove folders with contents on AFS if len(os.listdir(this_dir)) == 0: try: os.rmdir(this_dir) except OSError: logger.debug("Failed to remove: %s" % this_dir) exportToGPI('cleanUpShareDirs', cleanUpShareDirs, 'Functions') # # # $Log: not supported by cvs2svn $ # Revision 1.1 2008/07/17 16:40:53 moscicki # migration of 5.0.2 to HEAD # # the doc and release/tools have been taken from HEAD # # Revision 1.15 2007/08/24 15:55:03 moscicki # added executable flag to the file, ganga will set the executable mode of the app.exe file (in the sandbox only, the original file is not touched), this is to solve feature request #24452 # # Revision 1.14 2007/01/25 16:18:21 moscicki # mergefrom_Ganga-4-2-2-bugfix-branch_25Jan07 (GangaBase-4-14) #
def bootstrap(reg, interactive_session):
    """
    Create local subsystems (internal-services coordinator, thread pool,
    job monitoring loop) and register the shutdown hook.

    In the future this procedure should be enhanced to connect to remote
    subsystems.
    FIXME: this procedure should be moved to the Runtime package.

    This function changes the default value of the monitoring 'autostart'
    option depending on whether the session is interactive or batch. The
    autostart value may be overriden in the config file, so a warning is
    emitted if it differs from the default.

    Args:
        reg: the job registry to monitor (iterable of jobs)
        interactive_session (bool): True for an interactive (CLI) session,
            False for a batch/script session
    """
    from Ganga.Core.MonitoringComponent.Local_GangaMC_Service import JobRegistry_Monitor, config

    # Shutdown-policy configuration options (added here so they exist before
    # the atexit callbacks below ever read them).
    config.addOption('forced_shutdown_policy','session_type','If there are remaining background activities at exit such as monitoring, output download Ganga will attempt to wait for the activities to complete. You may select if a user is prompted to answer if he wants to force shutdown ("interactive") or if the system waits on a timeout without questions ("timeout"). The default is "session_type" which will do interactive shutdown for CLI and timeout for scripts.')
    config.addOption('forced_shutdown_timeout',60,"Timeout in seconds for forced Ganga shutdown in batch mode.")
    config.addOption('forced_shutdown_prompt_time',10,"User will get the prompt every N seconds, as specified by this parameter.")
    config.addOption('forced_shutdown_first_prompt_time',5,"User will get the FIRST prompt after N seconds, as specified by this parameter. This parameter also defines the time that Ganga will wait before shutting down, if there are only non-critical threads alive, in both interactive and batch mode.")

    from Ganga.Utility.logging import getLogger
    logger = getLogger()

    from Ganga.Core.GangaThread import GangaThreadPool
    # create generic Ganga thread pool
    thread_pool = GangaThreadPool.getInstance()

    # start the internal services coordinator
    from Ganga.Core.InternalServices import Coordinator,ShutdownManager
    Coordinator.bootstrap()

    # load the shutdown manager
    #ShutdownManager.install()

    # backend-specific setup (e.g. Remote: setup any remote ssh pipes)
    for j in reg:
        if hasattr(j,'status') and j.status in ['submitted','running']:
            if hasattr(j,'backend'):  # protect: EmptyGangaObject does not have backend either
                if hasattr(j.backend,'setup'):  # protect: EmptyGangaObject does not have setup() method
                    j.backend.setup()

    # start the monitoring loop
    global monitoring_component
    monitoring_component = JobRegistry_Monitor( reg )
    monitoring_component.start()

    # register the MC shutdown hook
    import atexit

    def should_wait_interactive_cb(t_total, critical_thread_ids, non_critical_thread_ids):
        """
        atexit callback for interactive sessions: return True to keep waiting
        for background threads, False to allow shutdown to proceed.

        t_last (module-level global) encodes prompt state: negative means
        "no prompt shown yet" and stores -(time the wait started); positive
        stores the time of the last prompt.
        """
        global t_last
        if t_last is None:
            t_last = -time.time()
        # if there are critical threads then prompt user or wait depending on configuration
        if critical_thread_ids:
            # Prompt after the first-prompt delay, then re-prompt every
            # forced_shutdown_prompt_time seconds.
            if ((t_last < 0 and time.time() + t_last > config['forced_shutdown_first_prompt_time']) or
                    (t_last > 0 and time.time() - t_last > config['forced_shutdown_prompt_time'])):
                msg = """Job status update or output download still in progress (shutdown not completed after %d seconds). %d background thread(s) still running: %s. Do you want to force the exit (y/[n])? """ % (t_total, len(critical_thread_ids), critical_thread_ids)
                resp = raw_input(msg)
                t_last = time.time()
                # Anything except 'y' means "keep waiting".
                return resp.lower() != 'y'
            else:
                return True
        # if there are non-critical threads then wait or shutdown depending on configuration
        elif non_critical_thread_ids:
            if t_total < config['forced_shutdown_first_prompt_time']:
                return True
            else:
                return False
        # if there are no threads then shutdown
        else:
            return False

    def should_wait_batch_cb(t_total, critical_thread_ids, non_critical_thread_ids):
        """
        atexit callback for batch sessions: wait up to the configured timeout
        for critical threads, then force shutdown with a warning.
        """
        # if there are critical threads then wait or shutdown depending on configuration
        if critical_thread_ids:
            if t_total < config['forced_shutdown_timeout']:
                return True
            else:
                logger.warning('Shutdown was forced after waiting for %d seconds for background activities to finish (monitoring, output download, etc). This may result in some jobs being corrupted.', t_total)
                return False
        # if there are non-critical threads then wait or shutdown depending on configuration
        elif non_critical_thread_ids:
            if t_total < config['forced_shutdown_first_prompt_time']:
                return True
            else:
                return False
        # if there are no threads then shutdown
        else:
            return False

    # register the exit function with the highest priority (==0)
    #atexit.register((0,monitoring_component.stop), fail_cb=mc_fail_cb,max_retries=config['max_shutdown_retries'])

    # select the shutdown method based on configuration and/or session type
    forced_shutdown_policy = config['forced_shutdown_policy']
    if forced_shutdown_policy == 'interactive':
        should_wait_cb = should_wait_interactive_cb
    else:
        if forced_shutdown_policy == 'batch':
            should_wait_cb = should_wait_batch_cb
        else:
            # 'session_type' (default): follow the session kind.
            if interactive_session:
                should_wait_cb = should_wait_interactive_cb
            else:
                should_wait_cb = should_wait_batch_cb

    # NOTE: this is Ganga's extended atexit — the (priority, callable) tuple
    # form is not the stdlib signature; priority 0 runs first.
    atexit.register((0,thread_pool.shutdown), should_wait_cb=should_wait_cb)

    # export to GPI
    from Ganga.Runtime.GPIexport import exportToGPI
    exportToGPI('runMonitoring',monitoring_component.runMonitoring,'Functions')

    # Monitoring autostarts by default only in interactive sessions; warn when
    # the configured value overrides that default.
    autostart_default = interactive_session
    config.overrideDefaultValue('autostart',bool(autostart_default))
    if config['autostart'] is not autostart_default:
        msg = 'monitoring loop %s (the default setting for %s session is %s)'
        val = { True : ('enabled', 'batch', 'disabled'),
                False: ('disabled', 'interactive', 'enabled')}
        logger.warning(msg%val[config['autostart']])

    if config['autostart']:
        monitoring_component.enableMonitoring()

# THIS IS FOR DEBUGGING ONLY
import time

class Stuck(GangaThread.GangaThread):
    # Debug-only thread that stays alive ~30 s and ignores stop requests,
    # used to exercise the shutdown-wait machinery above.
    def __init__(self):
        GangaThread.GangaThread.__init__(self,name='Stuck')

    def run(self):
        i = 0
        while i < 10:
            time.sleep(3)
            #print '*'*30,i
            i += 1

    def stop(self):
        print "I was asked to stop..."
                        # Remaining placeholder values for the script template
                        # (the replace_dict literal opens above this chunk).
                        '###LOCATIONSFILE###' : postProcessLocationsFP,
                        '###DIRAC_ENV###' : script_env}

        # Substitute every ###TOKEN### placeholder in the script template.
        for k, v in replace_dict.iteritems():
            script = script.replace(str(k), str(v))

        return script

    def hasMatchedFiles(self):
        """
        Report whether this file object has been matched against real files.

        Returns True when the stored LFN's basename equals the namePattern,
        or when subfiles have already been resolved and the namePattern
        matches the module-level ``regex`` (presumably the wildcard pattern
        — confirm). Logs an error and returns False otherwise.
        """
        if self.lfn != "" and self.namePattern != "":
            if self.namePattern == os.path.basename(self.lfn):
                return True
            else:
                logger.error("LFN doesn't match namePattern for file: %s" % str(self.namePattern))
                return False
        elif len(self.subfiles) > 0 and regex.search(self.namePattern) is not None:
            return True
        else:
            logger.error("Failed to Match file:\n%s" % str(self))
            return False

# add DiracFile objects to the configuration scope (i.e. it will be
# possible to instantiate DiracFile() objects via config file)
import Ganga.Utility.Config
Ganga.Utility.Config.config_scope['DiracFile'] = DiracFile

from Ganga.Runtime.GPIexport import exportToGPI
exportToGPI('GangaDirac', GangaList, 'Classes')
        # Continuation of the logger.info(...) call that opens above this chunk.
        "This may take a few minutes if you're running this for the first time after 6.1.23, feel free to go grab a tea/coffee"
    )
    for item in os.listdir(share_path):
        this_dir = os.path.join(share_path, item)
        if os.path.isdir(this_dir):
            # NB we do need to explicitly test the length of the returned value here
            # Checking is __MUCH__ faster than trying and failing to remove folders with contents on AFS
            if len(os.listdir(this_dir)) == 0:
                try:
                    os.rmdir(this_dir)
                except OSError:
                    # Best-effort cleanup: log at debug level and keep going.
                    logger.debug("Failed to remove: %s" % this_dir)

# Make the cleanup helper callable from the GPI.
exportToGPI('cleanUpShareDirs', cleanUpShareDirs, 'Functions')

#
#
# $Log: not supported by cvs2svn $
# Revision 1.1 2008/07/17 16:40:53 moscicki
# migration of 5.0.2 to HEAD
#
# the doc and release/tools have been taken from HEAD
#
# Revision 1.15 2007/08/24 15:55:03 moscicki
# added executable flag to the file, ganga will set the executable mode of the app.exe file (in the sandbox only, the original file is not touched), this is to solve feature request #24452
#
# Revision 1.14 2007/01/25 16:18:21 moscicki
# mergefrom_Ganga-4-2-2-bugfix-branch_25Jan07 (GangaBase-4-14)
#