Example #1
def ganga_job_submitted(application_name, backend_name, plain_job, master_job, sub_jobs):
    host = getConfig('System')['GANGA_HOSTNAME']
    user = getConfig('Configuration')['user']
    runtime_packages = ':'.join(map(os.path.basename, filter(lambda x: x, config['RUNTIME_PATH'].split(':'))))
    start = long(time.time() * 1000)

    job_submitted_message = {'application': application_name, 'backend': backend_name, 'user': user, 'host': host, 'start':
                             start, 'plain_job': plain_job, 'master_job': master_job, 'sub_jobs': sub_jobs, 'runtime_packages': runtime_packages}

    if config['UsageMonitoringMSG']:
        from Ganga.GPIDev.MonitoringServices import MSGUtil
        msg_config = getConfig('MSGMS')
        p = MSGUtil.createPublisher(
            msg_config['server'],
            msg_config['port'],
            msg_config['username'],
            msg_config['password'])

        # start publisher thread and enqueue usage message for sending
        p.start()
        p.send(msg_config['job_submission_message_destination'], repr(job_submitted_message), {'persistent': 'true'})
        # p.send('/queue/test.ganga.jobsubmission',repr(job_submitted_message),{'persistent':'true'})
        # ask publisher thread to stop. it will send queued message anyway.
        p.stop()
        p._finalize(10.)
        if hasattr(p, 'unregister'):
            p.unregister()
        del p
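
The pattern in this and the following examples is enqueue-then-stop: the publisher thread is started, the message is queued, and stop() is requested immediately; queued messages are still flushed before the thread exits. Below is a minimal, self-contained sketch of that contract. It is an illustration only: TinyPublisher is a hypothetical stand-in, not the MSGUtil implementation.

import threading
try:
    from Queue import Empty, Queue   # Python 2, matching the examples' vintage
except ImportError:
    from queue import Empty, Queue   # Python 3

class TinyPublisher(threading.Thread):
    def __init__(self):
        threading.Thread.__init__(self)
        self._queue = Queue()
        self._stop_requested = threading.Event()

    def send(self, destination, message):
        self._queue.put((destination, message))   # enqueue; safe before or after start()

    def stop(self):
        self._stop_requested.set()                # ask to stop; queued messages still go out

    def run(self):
        # Drain until a stop was requested AND nothing is left to send
        while not (self._stop_requested.is_set() and self._queue.empty()):
            try:
                destination, message = self._queue.get(timeout=0.1)
            except Empty:
                continue
            print('-> %s: %s' % (destination, message))

p = TinyPublisher()
p.start()
p.send('/queue/test', 'hello')
p.stop()
p.join()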
Example #2
def ganga_started(session_type, **extended_attributes):
    host = getConfig('System')['GANGA_HOSTNAME']
    version = getConfig('System')['GANGA_VERSION']
    user = getConfig('Configuration')['user']
    runtime_packages = ':'.join(
        map(os.path.basename, filter(lambda x: x, config['RUNTIME_PATH'].split(':'))))
    start = long(time.time() * 1000)

    usage_message = {'user': user, 'host': host, 'start': start, 'session':
                     session_type, 'runtime_packages': runtime_packages, 'version': version}

    usage_message.update(extended_attributes)

    if config['UsageMonitoringMSG']:
        from Ganga.GPIDev.MonitoringServices import MSGUtil
        msg_config = getConfig('MSGMS')
        p = MSGUtil.createPublisher(
            msg_config['server'],
            msg_config['port'],
            msg_config['username'],
            msg_config['password'])
        # start publisher thread and enqueue usage message for sending
        p.start()
        p.send(msg_config['usage_message_destination'], repr(usage_message), {'persistent': 'true'})
        # ask publisher thread to stop. it will send queued message anyway.
        p.stop()
        p._finalize(10.)
        if hasattr(p, 'unregister'):
            p.unregister()
        del p
Example #3
def ganga_started(session_type,**extended_attributes):
    host = getConfig('System')['GANGA_HOSTNAME']
    version = getConfig('System')['GANGA_VERSION']
    user = getConfig('Configuration')['user']
    runtime_packages = ':'.join(map(os.path.basename,filter(lambda x:x, config['RUNTIME_PATH'].split(':'))))    
    start = long(time.time()*1000)

    usage_message = {'user':user,'host':host,'start':start,'session':session_type,'runtime_packages':runtime_packages,'version':version}

    usage_message.update(extended_attributes)

    if config['UsageMonitoringURL']:
        import ApMon.apmon
        global monitor
        # the ApMon constructor may start background threads to refresh the configuration from URL
        # NOTE: the configuration (including defaultLogLevel) is overridden from the config file specified in URL
        monitor = ApMon.apmon.ApMon(config['UsageMonitoringURL'], defaultLogLevel=ApMon.apmon.Logger.FATAL)
        monitor.sendParameters('GangaUsage','%s@%s_%s'%(user,host,start),usage_message)
        # stop any background threads started by the ApMon constructor
        monitor.free()

    if config['UsageMonitoringMSG']:
        from Ganga.Lib.MonitoringServices.MSGMS import MSGUtil
        msg_config = getConfig('MSGMS')
        p = MSGUtil.createPublisher(
            msg_config['server'],
            msg_config['port'],
            msg_config['username'],
            msg_config['password'])
        # start publisher thread and enqueue usage message for sending
        p.start()
        p.send(msg_config['usage_message_destination'],repr(usage_message),{'persistent':'true'})
        # ask publisher thread to stop. it will send queued message anyway.
        p.stop()
Example #4
    def prepare(self, force=False):

        super(Ostap, self).prepare(force)
        self._check_inputs()

        
        share_dir = os.path.join (
            expandfilename ( getConfig('Configuration')['gangadir'] ) ,
            'shared'                            ,
            getConfig('Configuration')['user']  ,
            self.is_prepared.name               )
        
        input_sandbox_tar = os.path.join ( share_dir , 'inputsandbox',
                                           '_input_sandbox_%s.tar' % self.is_prepared.name ) 
        input_sandbox_tgz = os.path.join ( share_dir , 'inputsandbox',
                                           '_input_sandbox_%s.tgz' % self.is_prepared.name ) 
        
        fillPackedSandbox ( self.scripts      , input_sandbox_tar        ) 
        gzipFile          ( input_sandbox_tar , input_sandbox_tgz , True )
        
        # add the newly created shared directory into the metadata system if
        # the app is associated with a persisted object
        self.checkPreparedHasParent(self)
        self.post_prepare()
        logger.debug("Finished Preparing Application in %s" % share_dir)
Example #5
def getDiracEnv(sourceFile = None):
    """
    Returns the dirac environment stored in a global dictionary by Ganga.
    Once loaded and stored, this is used for executing all DIRAC code in the future
    Args:
        sourceFile (str): This is an optional file path which points to the env which should be sourced for this DIRAC
    """
    global DIRAC_ENV
    with Dirac_Env_Lock:
        if sourceFile is None:
            sourceFile = 'default'
            cache_file = getConfig('DIRAC')['DiracEnvJSON']
            source_command = getConfig('DIRAC')['DiracEnvSource']
            if not cache_file and not source_command:
                source_command = getConfig('DIRAC')['DiracEnvFile']
        else:
            # Needed for backwards compatibility with old configs...
            cache_file = None
            source_command = sourceFile

        if sourceFile not in DIRAC_ENV:
            if cache_file:
                DIRAC_ENV[sourceFile] = read_env_cache(cache_file)
            elif source_command:
                DIRAC_ENV[sourceFile] = get_env(source_command)
            else:
                logger.error("'DiracEnvSource' config variable empty")
                logger.error("%s  %s" % (getConfig('DIRAC')['DiracEnvJSON'], getConfig('DIRAC')['DiracEnvSource']))

    return DIRAC_ENV[sourceFile]
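
getDiracEnv memoises each environment in the module-level DIRAC_ENV dictionary under a per-source key, guarded by Dirac_Env_Lock so concurrent threads only source the environment once. A stripped-down sketch of the same caching pattern; ENV_CACHE, ENV_LOCK and load_env are illustrative names, not Ganga API:

import threading

ENV_CACHE = {}                    # source key -> captured environment dict
ENV_LOCK = threading.Lock()       # serialises cache population across threads

def load_env(source_command):
    # Hypothetical loader standing in for sourcing a script and capturing printenv
    return {'SOURCED_FROM': source_command}

def get_cached_env(source_file=None):
    key = source_file or 'default'
    with ENV_LOCK:
        if key not in ENV_CACHE:
            ENV_CACHE[key] = load_env(key)   # populate once, reuse afterwards
    return ENV_CACHE[key]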
Example #6
    def prepare(self, app, appconfig, appmasterconfig, jobmasterconfig):
        from Ganga.Lib.gLite import gLiteJobConfig

        prepared_exe = app.exe
        if app.is_prepared is not None:
            shared_path = os.path.join(expandfilename(getConfig('Configuration')['gangadir']),
                                       'shared', getConfig('Configuration')['user'])
            if isinstance(app.exe, str):
                # we have a file. is it an absolute path?
                if os.path.abspath(app.exe) == app.exe:
                    logger.info("Submitting a prepared application; taking any input files from %s" % (
                        app.is_prepared.name))
                    prepared_exe = File(os.path.join(os.path.join(
                        shared_path, app.is_prepared.name), os.path.basename(File(app.exe).name)))
                # else assume it's a system binary, so we don't need to
                # transport anything to the sharedir
                else:
                    prepared_exe = app.exe
            elif isinstance(app.exe, File):
                logger.info("Submitting a prepared application; taking any input files from %s" % (
                    app.is_prepared.name))
                prepared_exe = File(os.path.join(os.path.join(
                    shared_path, app.is_prepared.name), os.path.basename(File(app.exe).name)))

        return gLiteJobConfig(prepared_exe, app._getParent().inputsandbox, convertIntToStringArgs(app.args), app._getParent().outputsandbox, app.env)
Example #7
 def __init__(self, **kwargs):
     """
     Construct a voms proxy requirement and assign the default vo from the config if none has been provided
     """
     super(VomsProxy, self).__init__(**kwargs)
     if 'vo' not in kwargs and getConfig('LCG')['VirtualOrganisation']:
         self.vo = getConfig('LCG')['VirtualOrganisation']
Example #8
    def tearDown(self):
        wipe_temp_dir()

        from Ganga.Utility.Config import getConfig
        getConfig('Configuration').revertToDefault('gangadir')

        super(Savannah9638, self).tearDown()
Example #9
def ganga_started(session_type, **extended_attributes):
    host = getConfig('System')['GANGA_HOSTNAME']
    version = getConfig('System')['GANGA_VERSION']
    user = getConfig('Configuration')['user']
    runtime_packages = ':'.join(
        map(os.path.basename, filter(lambda x: x, config['RUNTIME_PATH'].split(':'))))
    start = long(time.time() * 1000)

    usage_message = {'user': user, 'host': host, 'start': start, 'session':
                     session_type, 'runtime_packages': runtime_packages, 'version': version}

    usage_message.update(extended_attributes)

    #if config['UsageMonitoringURL']:
    #    from Ganga.GPI import queues
    #    # Lets move the actual monitoring out of the main thread for some performance
    #    msg = '%s@%s_%s' % (user, host, start)
    #    queues.add( _setupMonitor, (config['UsageMonitoringURL'], msg, usage_message) )

    if config['UsageMonitoringMSG']:
        from Ganga.Lib.MonitoringServices.MSGMS import MSGUtil
        msg_config = getConfig('MSGMS')
        p = MSGUtil.createPublisher(
            msg_config['server'],
            msg_config['port'],
            msg_config['username'],
            msg_config['password'])
        # start publisher thread and enqueue usage message for sending
        p.start()
        p.send(msg_config['usage_message_destination'], repr(
            usage_message), {'persistent': 'true'})
        # ask publisher thread to stop. it will send queued message anyway.
        p.stop()
Example #10
def getDiracEnv(force=False):
    """
    Returns the dirac environment stored in a global dictionary by Ganga.
    This is expected to be stored as some form of 'source'-able file on disk which can be used to get the printenv after sourcing
    Once loaded and stored, this is used for executing all DIRAC code in the future
    Args:
        force (bool): This triggers a compulsory reload of the env from disk
    """
    global DIRAC_ENV
    with Dirac_Env_Lock:
        if DIRAC_ENV == {} or force:
            config_file = getConfig('DIRAC')['DiracEnvFile']
            if not os.path.exists(config_file):
                absolute_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../..', config_file)
            else:
                absolute_path = config_file
            if getConfig('DIRAC')['DiracEnvFile'] != "" and os.path.exists(absolute_path):

                env_dict = get_env_from_arg(this_arg = absolute_path, def_env_on_fail = False)

                if env_dict is not None:
                    DIRAC_ENV = env_dict
                else:
                    logger.error("Error determining DIRAC environment")
                    raise GangaException("Error determining DIRAC environment")

            else:
                logger.error("'DiracEnvFile' config variable empty or file not present")
                logger.error("Tried looking in : '%s' Please check your config" % absolute_path) 
    logger.debug("Dirac Env: %s" % DIRAC_ENV)
    return DIRAC_ENV
Example #11
def test_voms_proxy_life_cycle(gpi):
    from Ganga.GPI import VomsProxy, credential_store

    assert len(credential_store) == 0

    cred = credential_store.create(VomsProxy())
    assert cred.is_valid()
    assert len(credential_store) == 1
    assert os.path.isfile(cred.location)

    assert cred.vo == getConfig("LCG")["VirtualOrganisation"]

    cred.destroy()
    assert not cred.is_valid()

    credential_store.clear()
    assert len(credential_store) == 0

    with pytest.raises(KeyError):
        _ = credential_store[VomsProxy()]

    default_cred = credential_store.create(VomsProxy())
    explicit_default_cred = credential_store.create(VomsProxy(vo=getConfig("LCG")["VirtualOrganisation"]))
    assert explicit_default_cred == default_cred
    assert len(credential_store) == 1
Example #12
    def getJobInfo(self):
        """Create job_info from Job object."""
        j = self.job_info  # called on client, so job_info is Job object

        from Ganga.Utility.Config import getConfig
        batch = getConfig('Configuration')['Batch']
        host = getConfig('System')['GANGA_HOSTNAME']

        jj = self.job_info
        if jj.master:
            jj = jj.master

        ji = {
            'fqid': j.fqid,
            'EXECUTION_BACKEND': self.dynamic_util.cl_execution_backend(j),
            'OWNERDN': self.dynamic_util.cl_ownerdn(),
            'JOB_ID_INSIDE_THE_TASK': self.dynamic_util.cl_job_id_inside_the_task(j),
            'TASKNAME': self.dynamic_util.cl_task_name(j),
            'UNIQUEJOBID': self.dynamic_util.cl_unique_job_id(j),
            'BACKEND': self.job_info.backend.__class__.__name__,
            'BATCH':  batch,
            'GANGA_HOSTNAME': host,
            'JOB_UUID': jj.info.uuid,
            'JOB_NAME': jj.name
        }
        return ji
Example #13
def stop_ganga():

    from Ganga.Utility.logging import getLogger
    logger = getLogger()

    logger.info("Deciding how to shutdown")

    # Do we want to empty the repository on shutdown?
    from Ganga.Utility.Config import getConfig
    if 'AutoCleanup' in getConfig('TestingFramework'):
        whole_cleanup = getConfig('TestingFramework')['AutoCleanup']
    else:
        whole_cleanup = True

    logger.info("AutoCleanup: %s" % whole_cleanup)

    if whole_cleanup is True:
        # empty repository so we start again at job 0 when we restart
        logger.info("Clearing the Job and Template repositories")

        from Ganga.GPI import jobs, templates
        for j in jobs:
            try:
                j.remove()
            except:
                pass
        for t in templates:
            try:
                t.remove()
            except:
                pass
        if hasattr(jobs, 'clean'):
            jobs.clean(confirm=True, force=True)
        if hasattr(templates, 'clean'):
            templates.clean(confirm=True, force=True)

    logger.info("Shutting Down Internal Services")

    # Disable internal services such as monitoring and other tasks
    #from Ganga.Core.InternalServices import Coordinator
    # if Coordinator.servicesEnabled:
    #    Coordinator.disableInternalServices()
    #    Coordinator.servicesEnabled = False

    logger.info("Mimicking ganga exit")
    from Ganga.Core.InternalServices import ShutdownManager

    import Ganga.Core
    Ganga.Core.change_atexitPolicy(interactive_session=False, new_policy='batch')
    # This should now be safe
    ShutdownManager._ganga_run_exitfuncs()

    # Undo any manual editing of the config and revert to defaults
    from Ganga.Utility.Config import allConfigs
    for package in allConfigs.values():
        package.revertToDefaultOptions()

    # Finished
    logger.info("Test Finished")
Example #14
def dirac_inputdata(app):
    job = stripProxy(app).getJobObject()
    input_data = None
    parametricinput_data = None

    inputLFNs = []

    if hasattr(job.inputdata, "getLFNs"):
        inputLFNs = job.inputdata.getLFNs()

    if job.master:
        logger.debug("job.master.inputdata: %s " % str(job.master.inputdata))
    logger.debug("job.inputdata: %s" % str(job.inputdata))
    if hasattr(job.inputdata, "getLFNs"):
        logger.debug("getLFNs(): %s" % job.inputdata.getLFNs())

    has_input_DiracFile = False
    for this_file in job.inputfiles:
        if isType(this_file, DiracFile):
            has_input_DiracFile = True
            break
    if job.master and not has_input_DiracFile:
        for this_file in job.master.inputfiles:
            if isType(this_file, DiracFile):
                has_input_DiracFile = True
                break

    if len(inputLFNs) > 0:
        # master job with a splitter reaching prepare, hence bulk submit
        if not job.master and job.splitter:
            parametricinput_data = dirac_parametric_split(app)
            if parametricinput_data is not None and len(parametricinput_data) > getConfig("DIRAC")["MaxDiracBulkJobs"]:
                raise BackendError(
                    "Dirac",
                    "Number of bulk submission jobs '%s' exceeds the maximum allowed '%s' if more are needed please modify your config. Note there is a hard limit in Dirac of currently 1000."
                    % (len(parametricinput_data), getConfig("DIRAC")["MaxDiracBulkJobs"]),
                )
        # master job with no splitter or subjob already split proceed as normal
        else:
            input_data = job.inputdata.getLFNs()

    elif "Destination" not in job.backend.settings and not has_input_DiracFile:
        ##THIS IS NOT VERY DIRAC CENTRIC
        ##PLEASE WHEN TIME MOVE TO LHCBDIRAC where T1 is more applicable rcurrie
        ##Also editing the settings on the fly is asking for potential problems, should avoid
        t1_sites = getConfig("DIRAC")["noInputDataBannedSites"]
        logger.info("Job has no inputdata (T1 sites will be banned to help avoid overloading them).")
        if "BannedSites" in job.backend.settings:
            job.backend.settings["BannedSites"].extend(t1_sites)
            job.backend.settings["BannedSites"] = unique(job.backend.settings["BannedSites"])
        else:
            job.backend.settings["BannedSites"] = t1_sites[:]

    # import traceback
    # traceback.print_stack()

    return input_data, parametricinput_data
Example #15
    def split(self, job):
        """
            Method to do the splitting work
            Args:
                job (Job): master job to be used as a template to split subjobs
        """

        from GangaLHCb.Lib.Applications import Gauss
        if not isinstance(job.application, (Gauss, GaudiExec)):
            logger.warning("This application is of type: '%s', be careful how you use it with the GaussSplitter!" % type(job.application))

        subjobs = []

        inputdata = job.inputdata

        if not isinstance(job.application, GaudiExec):
            # I'm assuming this file is created by the Gauss Application at some stage?
            if not job.inputdata:
                share_path = os.path.join(expandfilename(getConfig('Configuration')['gangadir']),
                                          'shared',
                                          getConfig('Configuration')['user'],
                                          job.application.is_prepared.name,
                                          'inputdata',
                                          'options_data.pkl')

                if os.path.exists(share_path):
                    f = open(share_path, 'r+b')
                    #FIXME should this have been an addition?
                    inputdata = pickle.load(f)
                    f.close()

        for i in range(self.numberOfJobs):
            j = self._create_subjob(job, inputdata)
            # FIXME this starts from the 1st event and not zero, is it clear why?
            first = self.firstEventNumber + i * self.eventsPerJob + 1
            opts = 'from Configurables import GenInit \n'
            if isinstance(job.application, Gauss):
                opts += 'from Gaudi.Configuration import * \n'
                opts += 'ApplicationMgr().EvtMax = %d\n' % self.eventsPerJob
            else:
                opts += 'from Configurables import LHCbApp \n'
                opts += 'LHCbApp().EvtMax = %d\n' % self.eventsPerJob
            opts += 'GenInit("GaussGen").FirstEventNumber = %d\n' % first
            spillOver = ["GaussGenPrev", "GaussGenPrevPrev", "GaussGenNext"]
            for s in spillOver:
                opts += 'GenInit("%s").FirstEventNumber = %d\n' % (s, first)
            #j.application.extra.input_buffers['data.py'] += opts
            if isinstance(job.application, GaudiExec):
                j.application.extraOpts = j.application.extraOpts + '\n' + opts
            else:
                j._splitter_data = opts
            # j.inputsandbox.append(File(FileBuffer(path,opts).create().name))
            logger.debug("Creating job %d w/ FirstEventNumber = %d" % (i, first))
            subjobs.append(j)

        return subjobs
Example #16
def repairJobRepository(jobId):
        """ Repairs job repository for the comment attribute (migration from Comment object to string) """

        def repairFilePath(filePath):
                fileRead = open(filePath, 'r')

                index = -1
                found = False

                lines = fileRead.readlines()
                fileRead.close()

                for line in lines:
                        index += 1
                        if line.find('<class name="Comment"') > -1:
                                found = True
                                break

                if found:
                        newLines = []
                        for i in range(index):
                                newLines.append(lines[i])

                        newLines.append(lines[index+1].replace('<attribute name="comment"> ', ''))

                        for i in range(index+4, len(lines)):
                                newLines.append(lines[i])

                        fileWrite = open(filePath, 'w')
                        fileWrite.write(''.join(newLines))
                        fileWrite.close()


        from Ganga.Utility.Config import getConfig

        from Ganga.Utility.files import expandfilename

        import os

        if not isinstance(jobId,int):
                return

        repositoryPath = "repository/$usr/LocalXML/6.0/jobs/$thousandsNumxxx"
        repositoryPath = repositoryPath.replace('$usr', getConfig('Configuration')['user'])

        repositoryPath = repositoryPath.replace('$thousandsNum', str(jobId/1000))

        repositoryFullPath = os.path.join(expandfilename(getConfig('Configuration')['gangadir']), repositoryPath, str(jobId))
        
        #repair also the subjobs data files
        for subjobDir in os.listdir(repositoryFullPath):
                repositorySubJobFullPath = os.path.join(repositoryFullPath, subjobDir)
                if os.path.isdir(repositorySubJobFullPath):
                        repairFilePath(os.path.join(repositorySubJobFullPath, 'data'))          

        repairFilePath(os.path.join(repositoryFullPath, 'data'))
Example #17
 def getGridStorage( self, gridhome = "" ):
    vo = getConfig( "LCG" )[ "VirtualOrganisation" ]
    if not gridhome:
       gridhome = self.gridhome
    job = self._getParent()
    id = job.id
    username = getConfig( "Configuration" )[ "user" ]
    userletter = username[ 0 ]
    gridStorage = os.path.join\
       ( gridhome, "user", userletter, username, "ganga", str( id ) )
    return gridStorage
Example #18
def get_share_path(app=None):
    if app is None or app == "":
        return os.path.join(
            expandfilename(getConfig("Configuration")["gangadir"]), "shared", getConfig("Configuration")["user"]
        )
    return os.path.join(
        expandfilename(getConfig("Configuration")["gangadir"]),
        "shared",
        getConfig("Configuration")["user"],
        app.is_prepared.name,
    )
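
A usage sketch for get_share_path above, assuming a configured Ganga session so that getConfig resolves; the _FakeApp stand-in is hypothetical and only carries the single attribute the helper reads:

class _FakePrepared(object):
    name = 'conf-abc123'           # hypothetical ShareDir name

class _FakeApp(object):
    is_prepared = _FakePrepared()

print(get_share_path())            # <gangadir>/shared/<user>
print(get_share_path(_FakeApp()))  # <gangadir>/shared/<user>/conf-abc123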
Example #19
def load_config_files():
    """
    Load the config files as a normal Ganga session would, taking
    into account environment variables etc.
    """
    from Ganga.Utility.Config import getConfig, setSessionValuesFromFiles
    from Ganga.Runtime import GangaProgram
    system_vars = {}
    for opt in getConfig('System'):
        system_vars[opt] = getConfig('System')[opt]
    config_files = GangaProgram.get_config_files(os.path.expanduser('~/.gangarc'))
    setSessionValuesFromFiles(config_files, system_vars)
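
Once load_config_files has run, session values read from ~/.gangarc (and any files picked up via the environment) are visible through getConfig as usual. A minimal sketch, assuming a working Ganga installation:

load_config_files()
from Ganga.Utility.Config import getConfig
print(getConfig('Configuration')['gangadir'])   # session value if set, else the default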
Example #20
 def test_GaudiPython_prepare(self):
     g = self.job.application
     g.prepare()
     assert g.is_prepared is not None, 'is_prepared not correctly set'
     g.is_prepared.ls()
     share_path = os.path.join(expandfilename(getConfig('Configuration')['gangadir']),
                               'shared',
                               getConfig('Configuration')['user'],
                               g.is_prepared.name,
                               'inputsandbox')
     os.system(
         'cd ' + share_path + ';tar -xzvf _input_sandbox_' + g.is_prepared.name + '.tgz')
     assert os.path.exists(os.path.join(share_path, 'dummy.script'))
Example #21
 def test_optionsString(self):
     if not getConfig('Output')['ForbidLegacyInput']:
         ds = make_dataset(['lfn:a', 'lfn:b', 'pfn:c'])
     else:
         ds = make_dataset(['lfn:a', 'lfn:b', 'c'])
     this_str = ds.optionsString()
     print("---\n%s\n---\n" % this_str)
     assert this_str.find('LFN:a') >= 0
     assert this_str.find('LFN:b') >= 0
     if getConfig('Output')['ForbidLegacyInput']:
         assert this_str.find('PFN:') >= 0
     else:
         assert this_str.find('c') >= 0
Example #22
 def test_hasLFNs(self):
     if not getConfig('Output')['ForbidLegacyInput']:
         ds = make_dataset(['lfn:a'])
         assert ds.hasLFNs()
         if not getConfig('Output')['ForbidLegacyInput']:
             ds = make_dataset(['pfn:a'])
         else:
             ds = make_dataset(['a'])
         assert not ds.hasLFNs()
     else:
         ds = make_new_dataset([DiracFile(lfn='a')])
         assert ds.hasLFNs()
         ds = make_new_dataset([LocalFile('/some/local/file')])
         assert ds.hasPFNs()
Example #23
def stop_ganga():
    """
    This test stops Ganga and shuts it down

    Most of the logic is wrapped in ShutdownManager._ganga_run_exitfuncs, but additional code is used to clean up repos and such between tests
    """

    from Ganga.Utility.logging import getLogger
    logger = getLogger()

    logger.info("Deciding how to shutdown")

    # Do we want to empty the repository on shutdown?
    from Ganga.Utility.Config import getConfig
    if 'AutoCleanup' in getConfig('TestingFramework'):
        whole_cleanup = getConfig('TestingFramework')['AutoCleanup']
    else:
        whole_cleanup = True
    logger.info("AutoCleanup: %s" % whole_cleanup)

    if whole_cleanup is True:
        emptyRepositories()

    logger.info("Shutting Down Internal Services")

    # Disable internal services such as monitoring and other tasks
    #from Ganga.Core.InternalServices import Coordinator
    # if Coordinator.servicesEnabled:
    #    Coordinator.disableInternalServices()
    #    Coordinator.servicesEnabled = False

    logger.info("Mimicking ganga exit")
    from Ganga.Core.InternalServices import ShutdownManager

    # make sure we don't have an interactive shutdown policy
    from Ganga.Core.GangaThread import GangaThreadPool
    GangaThreadPool.shutdown_policy = 'batch'

    # This should now be safe
    ShutdownManager._ganga_run_exitfuncs()

    logger.info("Clearing Config")

    # Undo any manual editing of the config and revert to defaults
    clear_config()

    # Finished
    logger.info("Test Finished")
Example #24
def dirac_inputdata(app, hasOtherInputData=False):
    """ Construct the JDL component which requests the inputdata for a job
    Args:
        app (IApplication): app which belongs to the job of interest
        hasOtherInputData (bool): This is used to stop BannedSites being added to the JDL structure through backend.settings
    """
    job = app.getJobObject()
    input_data = None
    parametricinput_data = None

    inputLFNs = []

    input_data = None
    parametricinput_data = None

    if not job.inputdata and (not job.master or not job.master.inputdata):
        return input_data, parametricinput_data

    wanted_job = job
    if not job.inputdata and job.master and job.master.inputdata is not None and job.master.inputdata:
        wanted_job = job.master

    inputLFNs = ['LFN:'+this_file.lfn for this_file in wanted_job.inputdata if isinstance(this_file, DiracFile)]

    # master job with a splitter reaching prepare, hence bulk submit
    if not job.master and job.splitter:
        parametricinput_data = dirac_parametric_split(app)
        if parametricinput_data is not None and len(parametricinput_data) > getConfig('DIRAC')['MaxDiracBulkJobs']:
            raise BackendError('Dirac', 'Number of bulk submission jobs \'%s\' exceeds the maximum allowed \'%s\' if more are needed please modify your config. Note there is a hard limit in Dirac of currently 1000.' % (
                len(parametricinput_data), getConfig('DIRAC')['MaxDiracBulkJobs']))
        # master job with no splitter or subjob already split proceed as normal
        else:
            input_data = inputLFNs

    if 'Destination' not in job.backend.settings and not inputLFNs and not hasOtherInputData:
        t1_sites = getConfig('DIRAC')['noInputDataBannedSites']
        logger.info('Job has no inputdata (T1 sites will be banned to help avoid overloading them).')
        if 'BannedSites' in job.backend.settings:
            job.backend.settings['BannedSites'].extend(t1_sites)
            job.backend.settings['BannedSites'] = unique(job.backend.settings['BannedSites'])
        else:
            if t1_sites:
                job.backend.settings['BannedSites'] = t1_sites[:]

    if not input_data and not parametricinput_data:
        input_data = inputLFNs

    return input_data, parametricinput_data
Example #25
def postBootstrapHook():
    from Ganga.Utility.logging import getLogger

    logger = getLogger()

    from Ganga.Utility.Config import getConfig
    cfg = getConfig('MonitoringServices')

    MONITORING_DEFAULT = "Ganga.Lib.MonitoringServices.Dashboard.LCGAthenaMS.LCGAthenaMS"

    for name in cfg.options:
        value = cfg[name]
        if 'Athena' in name.split('/') and ('LCG' in name.split('/') or 'CREAM' in name.split('/')):
            if MONITORING_DEFAULT not in value.split(','):
             logger.error('''*** Outdated monitoring configuration - check your configuration files ***

*** Outdated monitoring configuration - check your configuration files ***

Maybe your ~/.gangarc contains old entries which override new defaults?
You may also check the configuration files defined by $GANGA_CONFIG_PATH or $GANGA_CONFIG environment variables.

To fix this problem simply remove (or comment out) the following lines in [MonitoringServices] section:
Athena/LCG=...
Athena/CREAM=...

For now I will add the correct default settings (%s) to the configuration of this Ganga session.
Note that in the future you won't be able to start Ganga until these issues are corrected manually.
'''%MONITORING_DEFAULT)

             cfg.setUserValue(name,value+','+MONITORING_DEFAULT)
Example #26
def DiracSizeSplitter(inputs, filesPerJob, maxSize, ignoremissing):
    """
    Generator that yields a datasets for LHCbdirac split jobs by size
    """
    #logger.debug( "DiracSplitter" )
    #logger.debug( "inputs: %s" % str( inputs ) )
    split_files = []
    i = inputs.__class__()

    if len(inputs.getLFNs()) != len(inputs.files):
        raise SplittingError(
            "Error trying to split dataset using DIRAC backend with non-DiracFile in the inputdata")

    all_files = igroup(inputs.files[:maxFiles], getConfig('DIRAC')['splitFilesChunks'],
                       leftovers=True)

    #logger.debug( "Looping over all_files" )
    #logger.debug( "%s" % str( all_files ) )

    for files in all_files:

        i.files = files

        LFNsToSplit = i.getLFNs()

        if len(LFNsToSplit) > 1:

            result = execute('splitInputDataBySize(%s,%d)'
                             % (i.getLFNs(), filesPerJob))

            if not result_ok(result):
                logger.error('DIRAC:: Error splitting files: %s' % str(result))
                raise SplittingError('Error splitting files.')

            split_files += result.get('Value', [])

        else:

            split_files = [LFNsToSplit]

    if len(split_files) == 0:
        raise SplittingError('An unknown error occurred.')

    # FIXME
    # check that all files were available on the grid
    big_list = []
    for l in split_files:
        big_list.extend(l)
    diff = set(inputs.getFileNames()[:maxFiles]).difference(big_list)
    if len(diff) > 0:
        for f in diff:
            logger.warning('Ignored file: %s' % f)
        if not ignoremissing:
            raise SplittingError('Some files not found!')
    ###

    logger.debug("Split Files: %s" % str(split_files))

    for dataset in split_files:
        yield dataset
Example #27
 def dorun(self):
     """Executes a run of actions and sleep periods.
     
     Initialises runid to the current UTC ID.
     
     """
     self.runid = Utility.utcid()
     while 1:
         logger.info("Start run %s with id '%s'.", self.run, self.runid)
         for action in self.run:
             try:
                 self._doaction(action)
             except GangaRobotContinueError as e:
                 logger.warning("Continue Error in Action '%s' with message '%s'. Run continued", action, e)
             except GangaRobotBreakError as e:
                 logger.warning("Break Error in Action '%s' with message '%s'. Run ended", action, e)
                 break
             except GangaRobotFatalError as e:
                 logger.error("Fatal Error in Action '%s' with message '%s'. Run aborted", action, e)
                 raise
             except Exception as e:
                 config = getConfig('Robot')
                 if (config['ExceptionBehaviour'] == 'Continue'):
                     logger.error("Error in Action '%s' with message '%s'. Run continued", action, e)
                 elif (config['ExceptionBehaviour'] == 'Break'):
                     logger.error("Error in Action '%s' with message '%s'. Run continued", action, e)
                     break
                 else:
                     logger.error("Abort run id '%s'. Action '%s' failed with message %s.", self.runid, action, e)
                     raise
         logger.info("Finish run id '%s'.", self.runid)
         if not self.repeat: 
             break
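
The except Exception branch above implements a config-driven error policy: 'Continue' logs and moves on to the next action, 'Break' ends the current run, and any other value aborts by re-raising. A stand-alone sketch of the same dispatch; run_actions and its arguments are illustrative, not the GangaRobot API:

def run_actions(actions, behaviour):
    for action in actions:
        try:
            action()
        except Exception as e:
            if behaviour == 'Continue':
                print("Error in %s: %s. Run continued" % (action, e))
            elif behaviour == 'Break':
                print("Error in %s: %s. Run ended" % (action, e))
                break
            else:
                raise            # abort the whole run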
Example #28
    def setLocation(self):
        """
        Sets the location of output files that were uploaded to mass storage from the WN
        """
        
        postprocessLocationsPath = os.path.join(self.joboutputdir, getConfig('Output')['PostProcessLocationsFileName'])
        if not os.path.exists(postprocessLocationsPath):
            return

        postprocesslocations = open(postprocessLocationsPath, 'r')
        
        for line in postprocesslocations.readlines():
                
            if line.strip() == '':      
                continue

            lineParts = line.split(' ') 
            outputPattern = lineParts[1]
            outputPath = lineParts[2]           

            if line.startswith('massstorage'):

                if outputPattern == self.namePattern:
                    massStorageLocation = outputPath.strip('\n')        
                    if massStorageLocation not in self.locations:
                        self.locations.append(massStorageLocation)
     
        postprocesslocations.close()
Example #29
def getSharedPath():
    # Required to be initialized for ShareDir object
    from Ganga.Utility.files import expandfilename
    import os.path
    Conf_config = getConfig('Configuration')
    root_default = os.path.join(expandfilename(Conf_config['gangadir']), 'shared', Conf_config['user'])
    return root_default
Example #30
    def setLocation(self):
        """
        """

        logger.debug("DiracFile: setLocation")

        if not stripProxy(self).getJobObject():
            logger.error("No job assocaited with DiracFile: %s" % str(self))
            return

        job = self.getJobObject()
        postprocessLocationsPath = os.path.join(job.outputdir, getConfig('Output')['PostProcessLocationsFileName'])

        postprocesslocations = None

        try:
            postprocesslocations = open(postprocessLocationsPath, 'r')
            self.subfiles = []
            for line in postprocesslocations.readlines():
                if line.startswith('DiracFile'):
                    if self.dirac_line_processor(line, self, os.path.dirname(postprocessLocationsPath)) and regex.search(self.namePattern) is None:
                        logger.error("Error processing line:\n%s\nAND: namePattern: %s is NOT matched" % (str(line), str(self.namePattern)))

        except Exception as err:
            logger.warning("unexpected Error: %s" % str(err))
Example #31
from Ganga.Utility.Config import getConfig
from Ganga.Utility.logging import getLogger

from Ganga.Utility.GridShell import getShell

from Ganga.GPIDev.Credentials.ICredential import ICredential

logger = getLogger()

logger.critical('LCG Grid Simulator ENABLED')

##########################################################################
# GRID SIMULATOR
##########################################################################

config = getConfig("GridSimulator")


def sleep(val):
    import time
    time.sleep(get_number(val))


def failed(val):
    t = get_number(val)
    import random
    return random.random() < t


def get_number(val):
    import random
Exemple #32
0
from Ganga.Utility.Config import getConfig

outputconfig = getConfig("Output")


def getSharedPath():
    # Required to be initialized for ShareDir object
    from Ganga.Utility.files import expandfilename
    import os.path
    Conf_config = getConfig('Configuration')
    root_default = os.path.join(expandfilename(Conf_config['gangadir']),
                                'shared', Conf_config['user'])
    return root_default
Example #33
#   The absolute path to the job's temporary working directory.
#=============================


sge_config.addOption('preexecute', 'os.chdir(os.environ["TMPDIR"])\nos.environ["PATH"]+=":."',
                 "String contains commands executing before submiting job to queue")
sge_config.addOption('postexecute', '', "String contains commands executing before submiting job to queue")
sge_config.addOption('jobnameopt', 'N', "String contains option name for name of job in batch system")
sge_config.addOption('timeout', 600, 'Timeout in seconds after which a job is declared killed if it has not touched its heartbeat file. Heartbeat is touched every 30s so do not set this below 120 or so.')

# ------------------------------------------------
# Mergers
merge_config = makeConfig('Mergers', 'parameters for mergers')
merge_config.addOption('associate', {'log': 'TextMerger', 'root': 'RootMerger',
                               'text': 'TextMerger', 'txt': 'TextMerger'}, 'Dictionary of file associations')
gangadir = getConfig('Configuration')['gangadir']
merge_config.addOption('merge_output_dir', gangadir +
                 '/merge_results', "location of the merger's outputdir")
merge_config.addOption('std_merge', 'TextMerger', 'Standard (default) merger')

# ------------------------------------------------
# Preparable
preparable_config = makeConfig('Preparable', 'Parameters for preparable applications')
preparable_config.addOption('unprepare_on_copy', False, 'Unprepare a prepared application when it is copied')

# ------------------------------------------------
# GPIComponentFilters
gpicomp_config = makeConfig('GPIComponentFilters', """Customization of GPI component object assignment
for each category there may be multiple filters registered, the one used being defined
in the configuration file in [GPIComponentFilters]
e.g: {'datasets':{'lhcbdatasets':lhcbFilter, 'testdatasets':testFilter}...}
Example #34
        param += '-o "%s" ' % (
            outfiles
        )  # must be double quotes, because python prints strings in 'single quotes'

        for file in jspec.Files:
            if file.type in ['output', 'log'] and configPanda['chirpconfig']:
                file.dispatchDBlockToken = configPanda['chirpconfig']
                logger.debug('chirp file %s', file)

        jspec.jobParameters = param

        return jspec

Example #35
from Ganga.GPIDev.Credentials import GridProxy

gridProxy = GridProxy()

from Ganga.GPIDev.Adapters.ApplicationRuntimeHandlers import allHandlers

allHandlers.add('Executable', 'Panda', ExecutablePandaRTHandler)

from Ganga.Utility.Config import getConfig, ConfigError

config = getConfig('Athena')
configDQ2 = getConfig('DQ2')
configPanda = getConfig('Panda')

from Ganga.Utility.logging import getLogger

logger = getLogger()
from Ganga.GPIDev.Adapters.ApplicationRuntimeHandlers import allHandlers

from GangaAtlas.Lib.ATLASDataset import isDQ2SRMSite, getLocationsCE, getIncompleteLocationsCE, getIncompleteLocations
from GangaAtlas.Lib.ATLASDataset import ATLASLocalDataset
from GangaAtlas.Lib.ATLASDataset import DQ2Dataset

from Ganga.Utility.Config import getConfig, makeConfig, ConfigError
from Ganga.Utility.logging import getLogger

from Ganga.GPIDev.Adapters.IRuntimeHandler import IRuntimeHandler
from Ganga.Utility.files import expandfilename

from GangaAtlas.Lib.ATLASDataset.DQ2Dataset import dq2outputdatasetname
shared_path = os.path.join(
    expandfilename(getConfig('Configuration')['gangadir']), 'shared',
    getConfig('Configuration')['user'])

__directory__ = os.path.dirname(__file__)


def _append_file_buffer(inputbox, name, array):

    inputbox.append(FileBuffer(name, '\n'.join(array) + '\n'))


def _append_files(inputbox, *names):

    for name in names:
        inputbox.append(File(os.path.join(__directory__, name)))
Example #36
class Gaudi(GaudiBase):

    _name = 'Gaudi'
    __doc__ = GaudiDocString(_name)
    _category = 'applications'
    _exportmethods = GaudiBase._exportmethods[:]
    _exportmethods += ['prepare', 'unprepare']
    _hidden = 1
    _schema = GaudiBase._schema.inherit_copy()

    docstr = 'The gaudirun.py cli args that will be passed at run-time'
    _schema.datadict['args'] = SimpleItem(defvalue=['-T'],
                                          sequence=1,
                                          strict_sequence=0,
                                          typelist=['str', 'type(None)'],
                                          doc=docstr)
    docstr = 'The name of the optionsfile. Import statements in the file ' \
             'will be expanded at submission time and a full copy made'
    _schema.datadict['optsfile'] = FileItem(preparable=1,
                                            sequence=1,
                                            strict_sequence=0,
                                            defvalue=[],
                                            doc=docstr)
    docstr = 'A python configurable string that will be appended to the '  \
             'end of the options file. Can be multiline by using a '  \
             'notation like \nHistogramPersistencySvc().OutputFile = '  \
             '\"myPlots.root"\\nEventSelector().PrintFreq = 100\n or by '  \
             'using triple quotes around a multiline string.'
    _schema.datadict['extraopts'] = SimpleItem(preparable=1,
                                               defvalue=None,
                                               typelist=['str', 'type(None)'],
                                               doc=docstr)

    _schema.version.major += 0
    _schema.version.minor += 0

    def _auto__init__(self):
        """bootstrap Gaudi applications. If called via a subclass
        set up some basic structure like version platform..."""
        self._init()

    def _parse_options(self):
        raise NotImplementedError

    def prepare(self, force=False):

        from Ganga.GPIDev.Lib.GangaList.GangaList import GangaList
        from Ganga.GPIDev.Lib.File.File import File
        if isType(self.optsfile, (list, tuple, GangaList)):
            for this_file in self.optsfile:
                if type(this_file) is str:
                    this_file = File(this_file)
                else:
                    continue

        elif type(self.optsfile) is str:
            self.optsfile = [File(self.optsfile)]

        try:
            super(Gaudi, self).prepare(force)
        except Exception as err:
            logger.debug("Super Prepare Error:\n%s" % str(err))
            raise err

        logger.debug("Prepare")

        _is_prepared = self.is_prepared

        #logger.info("_is_prepared: %s" % _is_prepared)

        share_dir = os.path.join(
            expandfilename(getConfig('Configuration')['gangadir']), 'shared',
            getConfig('Configuration')['user'], _is_prepared.name)

        # We will return a list of files 'send_to_share' which will be copied into the jobs
        # inputsandbox when prepare called from job object. NOTE that these files will not go
        # into an inputsandbox when prepare called on standalone app.
        # Things in the inputsandbox end up in the working dir at runtime.

        # Exception is just re-thrown here after setting is_prepared to None
        # could have done the setting in the actual functions but didn't want
        # prepared state altered from the readInputData pseudo-static member
        try:
            self._check_inputs()
        except Exception as err:
            logger.debug("_check_inputs Error:\n%s" % str(err))
            self.unprepare()
            raise err
Example #37
class CredentialStore(GangaObject, collections.Mapping):
    """
    The central management for all credentials

    It is not intended to store the credential objects between sessions,
    rather it will search the filesystem or create new credential files.

    Its interface is similar to a dictionary with ``ICredentialRequirement``
    objects as keys and ``ICredentialInfo`` objects as values.

    A single instance of this class makes most sense and should be created in the bootstrap and exported.


    E.g:

    To create a new Afs Token call::

        credential_store.create(AfsToken())

    To create a new Dirac Proxy call::

        credential_store.create(DiracProxy())


    To destroy a proxy which is already in the store call::

        credential_store[VomsProxy()].destroy()


    To get a summary of the available proxies already in the store simply type::

        credential_store
    """

    _schema = Schema(Version(1, 0), {})

    _category = 'credentials2'
    _hidden = 1  # This class is hidden since we want a 'singleton' created in the bootstrap

    retry_limit = 5
    enable_caching = True

    _last_clean = None
    _clean_delay = getConfig('Credentials')['CleanDelay']

    __slots__ = ('credentials', )

    def __init__(self):
        super(CredentialStore, self).__init__()
        self.credentials = set()

    @export
    def create(self, query, create=True, check_file=False):
        # type: (ICredentialRequirement, bool, bool) -> ICredentialInfo
        """
        Create an ``ICredentialInfo`` for the query.

        Args:
            query (ICredentialRequirement):
            check_file (bool): Raise an exception if the file does not exist
            create (bool): Create the credential file

        Returns:
            The newly created ICredentialInfo object
        """

        try:
            assert isinstance(query,
                              ICredentialRequirement), "Error checking 'query'"
            assert isinstance(create, bool), "Error checking 'create'"
            assert isinstance(check_file, bool), "Error checking 'check_file'"
        except AssertionError as err:
            raise CredentialsError(
                "Requirements to make a Credential are wrong. Please check your arguments. %s"
                % err)

        cred = query.info_class(query, check_file=check_file, create=create)
        self.credentials.add(cred)
        return cred

    def remove(self, credential_object):
        # type: (ICredentialInfo) -> None
        """
        Args:
            credential_object (ICredentialInfo):
        """

        self.credentials.remove(credential_object)

    @export
    def __str__(self, interactive=False):
        """
        This creates a table summary of all credentials known to the store
        """
        self.clean()
        headers = ['Type', 'Location', 'Valid', 'Time left']
        cred_info = [
            [
                str(f) for f in (type(cred).__name__, cred.location,
                                 cred.is_valid(), cred.time_left())
            ]  # Format each field as a string
            for cred in self.credentials  # for each credential in the store
        ]
        return self._create_table(headers, cred_info)

    @staticmethod
    def _create_table(headers, data):
        """
        Create a formatted table out of the headers and data
        Args:
            headers (List[str]): the strings for the table headers
            data (List[List[str]]): the body of the table

        Returns (str): a formatted string displaying the data
        """
        rows = [headers] + data
        # Get the length of the longest string in each column
        column_widths = [
            max(len(field) for field in column) for column in zip(*rows)
        ]

        def pad_row_strings(row, widths, filler=' '):
            # type: (List[str], List[int], str) -> List[str]
            """Add padding to each of ``row`` to equal the corresponding ``widths`` entry, padded with ``filler``"""
            return [
                '{field:{filler}<{width}}'.format(field=field[0],
                                                  filler=filler,
                                                  width=field[1])
                for field in zip(row, widths)
            ]

        padded_headers = pad_row_strings(headers, column_widths)
        padded_dividers = pad_row_strings([''] * len(column_widths),
                                          column_widths,
                                          filler='-')
        padded_data = [pad_row_strings(row, column_widths) for row in data]

        # Concatenate the field strings together
        def strings_to_row(strings, spacer='|'):
            # type: (List[str], str) -> str
            return ' {0} '.format(spacer).join(strings)

        header = strings_to_row(padded_headers)
        divider = strings_to_row(padded_dividers, spacer='+')
        body = '\n'.join(
            strings_to_row(padded_field_strings)
            for padded_field_strings in padded_data)

        return '\n'.join([header, divider, body])

    @export
    def __iter__(self):
        """Allow iterating over the store directly"""
        # yield from self.credentialList #In Python 3.3
        return iter(self.credentials)

    @export
    def __len__(self):
        """How many credentials are known about in the system"""
        return len(self.credentials)

    @export
    def __getitem__(self, query):
        # type: (ICredentialRequirement) -> ICredentialInfo
        """
        This function will try quite hard to find and wrap any
        missing credential but will never create a new file on disk.
        It should *always* be non-interactive so that it can be run
        on background threads without issue.

        Args:
            query (ICredentialRequirement):

        Returns:
            A single ICredentialInfo object which matches the requirements

        Raises:
            GangaKeyError: If it could not provide a credential
            GangaTypeError: If query is of the wrong type
        """

        self.clean()

        if not isinstance(query, ICredentialRequirement):
            raise GangaTypeError(
                'Credential store query should be of type ICredentialRequirement'
            )

        match = self.match(query)
        if match:
            return match

        try:
            cred = self.create(query, create=False, check_file=True)
        except IOError as err:
            logger.debug(err.strerror)
        except CredentialsError as err:
            logger.debug(str(err))
        else:
            self.credentials.add(cred)
            return cred

        raise GangaKeyError(
            'Matching credential [{query}] not found in store.'.format(
                query=query))

    def get(self, query, default=None):
        # type: (ICredentialRequirement, Optional[ICredentialInfo]) -> Optional[ICredentialInfo]
        """
        Return the value for ``query`` if ``query`` is in the store, else default.
        If ``default`` is not given, it defaults to ``None``, so that this method never raises a ``KeyError``.

        Args:
            query (ICredentialRequirement):
            default (ICredentialInfo):

        Returns:
            A single ICredentialInfo object which matches the requirements or ``default``
        """

        try:
            assert isinstance(query,
                              ICredentialRequirement), "Error checking 'query'"
            if default is not None:
                assert isinstance(default,
                                  ICredentialInfo), "Error checking 'default'"
        except AssertionError as err:
            raise CredentialsError(
                "Requirements for get-ing a Credential are wrong. Please check your arguments. %s"
                % err)

        try:
            return self[query]
        except KeyError:
            return default

    def get_all_matching_type(self, query):
        # type: (ICredentialRequirement) -> Sequence[ICredentialInfo]
        """
        Returns all ``ICredentialInfo`` with the type that matches the query

        Args:
            query (ICredentialRequirement):

        Returns:
            list[ICredentialInfo]: A list of all matching objects
        """

        try:
            assert isinstance(query,
                              ICredentialRequirement), "Error checking 'query'"
        except AssertionError as err:
            raise CredentialsError(
                "Requirements for matching all Credential are wrong. Please check your arguments. %s"
                % err)

        return [
            cred for cred in self.credentials
            if isinstance(cred, query.info_class)
        ]

    def matches(self, query):
        # type: (ICredentialRequirement) -> Sequence[ICredentialInfo]
        """
        Search the credentials in the store for all matches. They must match every condition exactly.

        Args:
            query (ICredentialRequirement):

        Returns:
            list[ICredentialInfo]: A list of all matching objects
        """

        try:
            assert isinstance(query,
                              ICredentialRequirement), "Error checking 'query'"
        except AssertionError as err:
            raise CredentialsError(
                "Requirements for matching any Credential are wrong. Please check your arguments. %s"
                % err)

        return [
            cred for cred in self.get_all_matching_type(query)
            if cred.check_requirements(query)
        ]

    def match(self, query):
        # type: (ICredentialRequirement) -> ICredentialInfo
        """
        Returns a single match from the store

        Args:
            query (ICredentialRequirement):

        Returns:
            ICredentialInfo: A single credential object. If more than one is found, the first is returned
        """

        try:
            assert isinstance(query,
                              ICredentialRequirement), "Error checking 'query'"
        except AssertionError as err:
            raise CredentialsError(
                "Requirements for matching a Credential are wrong. Please check your arguments. %s"
                % err)

        matches = self.matches(query)
        if len(matches) == 1:
            return matches[0]
        if len(matches) > 1:
            logger.debug('More than one match...')
            # If the store holds both a specific credential and a more general
            # one that both satisfy a general query, which should win? Does it
            # matter, given that only a general proxy was asked for? What are the use cases?
            # TODO For now just return the first one... though perhaps they should be merged somehow?
            return matches[0]
        return None

    @export
    def renew(self):
        # type: () -> None
        """
        Renew all credentials which are invalid or will expire soon.
        It also uses the entries in `needed_credentials` and adds and renews those
        TODO Should this function be standalone?
        """
        for cred in self.credentials:
            if not cred.is_valid() or cred.time_left() < timedelta(hours=1):
                cred.renew()
        for cred_req in needed_credentials - self.credentials:
            try:
                self[cred_req].renew()
            except KeyError:
                self.create(cred_req)

    @export
    def clear(self):
        # type: () -> None
        """
        Remove all credentials in the system (without destroying them)
        """
        self.credentials = set()

    def clean(self):
        # type: () -> None
        """
        Remove any credentials with missing files
        """
        this_time = time.time()
        if not CredentialStore._last_clean:
            CredentialStore._last_clean = this_time
            self.credentials = set(cred for cred in self.credentials
                                   if cred.exists())
        elif this_time - CredentialStore._last_clean > CredentialStore._clean_delay:
            self.credentials = set(cred for cred in self.credentials
                                   if cred.exists())
            CredentialStore._last_clean = this_time
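
A short usage sketch of the matching API above. VomsProxy is one concrete ICredentialRequirement shipped with Ganga and is assumed here purely for illustration; the VO name is hypothetical.

from Ganga.GPIDev.Credentials import credential_store
from Ganga.GPIDev.Credentials.VomsProxy import VomsProxy

req = VomsProxy(vo='some_vo')            # hypothetical VO name
cred = credential_store.get(req)         # None if nothing in the store matches
if cred is None:
    cred = credential_store.create(req)  # create and store a new credential
print(cred.time_left())                  # a timedelta, as used by renew() above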
Exemple #38
0
                    if not x.exists():
                        raise ApplicationConfigurationError(None, '%s: file not found' % x.name)
                except AttributeError as err:
                    raise ApplicationConfigurationError(err, '%s (%s): unsupported type, must be a string or File' % (str(x), str(type(x))))

        validate_argument(self.exe, exe=1)

        for a in self.args:
            validate_argument(a)

        return (None, None)

# disable type checking for 'exe' property (a workaround to assign File() objects)
# FIXME: a cleaner solution, which is integrated with type information in
# schemas should be used automatically
config = getConfig('defaults_Executable')  # _Properties
#config.setDefaultOption('exe',Executable._schema.getItem('exe')['defvalue'], type(None),override=True)
config.options['exe'].type = type(None)

# not needed anymore:
#   the backend is also required in the option name
#   so we need a kind of dynamic options (5.0)
#mc = getConfig('MonitoringServices')
#mc['Executable'] = None


def convertIntToStringArgs(args):

    result = []

    for arg in args:
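
The example is cut off inside the loop; a minimal completion consistent with the function's name, assuming it simply stringifies integer arguments and passes everything else through, would be:

def convertIntToStringArgs(args):

    result = []

    for arg in args:
        if isinstance(arg, int):
            result.append(str(arg))
        else:
            result.append(arg)

    return result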
Exemple #39
0
    def _display(self, colour):
        from Ganga.Utility.ColourText import ANSIMarkup, NoMarkup, getColour, Foreground, Effects

        if colour:
            markup = ANSIMarkup()
        else:
            markup = NoMarkup()

        fg = Foreground()

        display_config = getConfig('Display')

        name_colour = getColour(display_config['config_name_colour'])
        docstring_colour = getColour(
            display_config['config_docstring_colour'])  # fg.boldgrey
        value_colour = getColour(
            display_config['config_value_colour'])  # fx.normal

        levels = ['**', '* ', '  ']
        levels = map(lambda x: markup(x, fg.red), levels)
        from cStringIO import StringIO
        sio = StringIO()
        sio.write('%s' % markup(stripProxy(self).name, name_colour) + ' : ' +
                  markup(stripProxy(self).docstring, docstring_colour) + '\n')
        opts = sorted(stripProxy(self).options.keys())
        INDENT = '     ' * 2
        p = re.compile('[\.\w]*\.')
        for o in opts:
            sio.write(
                levels[stripProxy(self).getEffectiveLevel(o)] + '   ' +
                markup(o, name_colour) + ' = ' +
                markup(p.sub('', repr(stripProxy(self)[o])), value_colour) +
                '\n')
            sio.write(
                textwrap.fill(markup(
                    stripProxy(self).options[o].docstring.strip(),
                    docstring_colour),
                              width=80,
                              initial_indent=INDENT,
                              subsequent_indent=INDENT) + '\n')
            typelist = stripProxy(self).options[o].typelist
            if not typelist:
                typedesc = 'Type: ' + \
                    p.sub('',str(type(stripProxy(self).options[o].default_value)))
            else:
                typedesc = 'Allowed types: ' + \
                    str([p.sub('',str(t)) for t in typelist])
            sio.write(markup(INDENT + typedesc, docstring_colour) + '\n')
            filter = stripProxy(self).options[o].filter
            if filter:
                filter_doc = filter.__doc__
                if not filter_doc:
                    filter_doc = "undocumented"
                sio.write(
                    markup(INDENT + "Filter: " +
                           filter_doc, docstring_colour) + '\n')
            examples = stripProxy(self).options[o].examples
            if examples:
                sio.write(
                    markup(INDENT + "Examples:", docstring_colour) + '\n')
                for e in examples.splitlines():
                    sio.write(
                        markup(INDENT + e.strip(), docstring_colour) + '\n')

        return sio.getvalue()
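
A small sketch of the colour-markup helpers the method relies on, assuming the Ganga.Utility.ColourText API seen above: a markup object is called with (text, colour_code) and either wraps the text in ANSI escape codes (ANSIMarkup) or returns it unchanged (NoMarkup).

from Ganga.Utility.ColourText import ANSIMarkup, NoMarkup, Foreground

fg = Foreground()
markup = ANSIMarkup()
print(markup('failed', fg.red))      # coloured on ANSI-capable terminals
print(NoMarkup()('failed', fg.red))  # plain-text fallback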
Exemple #40
0
    def _getRootEnvSys(self, version, usepython=False):
        """Returns an environment suitable for running Root and sometimes Python."""
        from os.path import join
        from os import environ

        from Ganga.Lib.Root.shared import setEnvironment, findPythonVersion

        rootsys = getrootsys(version)

        logger.info("rootsys: %s" % str(rootsys))

        rootenv = {}
        # propagate from localhost
        if 'PATH' in environ:
            setEnvironment('PATH',
                           environ['PATH'],
                           update=True,
                           environment=rootenv)
        if 'LD_LIBRARY_PATH' in environ:
            setEnvironment('LD_LIBRARY_PATH',
                           environ['LD_LIBRARY_PATH'],
                           update=True,
                           environment=rootenv)

        setEnvironment('LD_LIBRARY_PATH',
                       join(rootsys, 'lib'),
                       update=True,
                       environment=rootenv)
        setEnvironment('PATH',
                       join(rootsys, 'bin'),
                       update=True,
                       environment=rootenv)
        setEnvironment('ROOTSYS', rootsys, update=False, environment=rootenv)
        logger.debug('Have set Root variables. rootenv is now %s',
                     str(rootenv))

        if usepython:
            # first get from config
            python_version = ''
            try:
                python_version = getConfig('ROOT')['pythonversion']
            except ConfigError as e:
                logger.debug(
                    'There was a problem trying to get [ROOT]pythonversion: %s.',
                    e)

            logger.debug('Found version of python: %s', str(python_version))

            # now try grepping files
            if not python_version:
                python_version = findPythonVersion(rootsys)

            if (python_version is None):
                logger.warning(
                    'Unable to find the Python version needed for Root version %s. See the [ROOT] section of your .gangarc file.',
                    version)
            else:
                logger.debug('Python version found was %s', python_version)
            python_home = getpythonhome(pythonversion=python_version)
            logger.info("Looking in: %s" % python_home)
            logger.debug('PYTHONHOME is being set to %s', python_home)

            python_bin = join(python_home, 'bin')
            setEnvironment('PATH',
                           python_bin,
                           update=True,
                           environment=rootenv)
            setEnvironment('PYTHONPATH',
                           join(rootsys, 'lib'),
                           update=True,
                           environment=rootenv)
            logger.debug('Added PYTHONPATH. rootenv is now %s', str(rootenv))

            if join(python_bin, 'python') != sys.executable:
                # only try to do all this if the python currently running isn't
                # going to be used
                logger.debug('Using a different Python - %s.', python_home)
                python_lib = join(python_home, 'lib')

                if not os.path.exists(python_bin) or not os.path.exists(
                        python_lib):
                    logger.error(
                        'The PYTHONHOME specified does not have the expected structure. See the [ROOT] section of your .gangarc file.'
                    )
                    logger.error('PYTHONPATH is: ' + str(os.environ.get('PYTHONPATH', '')))

                setEnvironment('LD_LIBRARY_PATH',
                               python_lib,
                               update=True,
                               environment=rootenv)
                setEnvironment('PYTHONHOME',
                               python_home,
                               update=False,
                               environment=rootenv)
                setEnvironment('PYTHONPATH',
                               python_lib,
                               update=True,
                               environment=rootenv)

        return (rootenv, rootsys)
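
A simplified stand-in for the setEnvironment(...) semantics relied on above (a sketch, not the Ganga implementation): update=True merges the value into an existing path-like variable, update=False sets it outright.

import os

def set_env_sketch(name, value, update, environment):
    # merge into an existing colon-separated path when update is requested
    if update and environment.get(name):
        environment[name] = value + os.pathsep + environment[name]
    else:
        environment[name] = value

rootenv = {}
set_env_sketch('PATH', '/opt/root/bin', True, rootenv)   # hypothetical path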
Exemple #41
0
def file_contains(filename, string):
    # search the file contents, closing the descriptor promptly
    with open(filename) as f:
        return f.read().find(string) != -1


def write_file(filename, content):
    """ Open,write and close the file descriptor"""
    f = open(filename, 'w')
    try:
        return f.write(content)
    finally:
        f.close()


def read_file(filename):
    """ read the file, and safely close the file at the end"""
    f = open(filename)
    try:
        return "\n%s\n" % f.read()
    finally:
        f.close()
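
These helpers predate routine use of context managers; behaviour-equivalent sketches using with close the descriptor in the same way:

def write_file_with(filename, content):
    with open(filename, 'w') as f:
        return f.write(content)

def read_file_with(filename):
    with open(filename) as f:
        return "\n%s\n" % f.read()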


import unittest
failureException = unittest.TestCase.failureException

try:
    from Ganga.Utility.Config import getConfig
    config = getConfig('TestingFramework')
except:  # if we are outside Ganga, use a simple dict
    config = {}
Exemple #42
0
from Ganga.GPIDev.Credentials import credential_store

from Ganga.Utility.Config import getConfig
from Ganga.Utility.logging import getLogger

from Ganga.Utility.GridShell import getShell

from Ganga.Lib.LCG.GridftpSandboxCache import GridftpFileIndex, GridftpSandboxCache

from Ganga.Lib.LCG.Utility import get_uuid
from Ganga.Lib.Root import randomString

# global variables
logger = getLogger()

config = getConfig('LCG')


def __set_submit_option__():

    submit_option = ''

    if config['Config']:
        submit_option += ' --config %s' % config['Config']
    elif config['GLITE_ALLOWED_WMS_LIST']:
        wms_conf_path = os.path.join(os.environ['GLITE_WMS_LOCATION'], 'etc',
                                     config['VirtualOrganisation'],
                                     'glite_wmsui.conf')
        temp_wms_conf = tempfile.NamedTemporaryFile(suffix='.conf',
                                                    delete=False)
Exemple #43
0
from Ganga.GPIDev.Lib.GangaList.GangaList import GangaList
from Ganga.GPIDev.Schema import Schema, Version, SimpleItem, ComponentItem
from Ganga.GPIDev.Adapters.IGangaFile import IGangaFile
from Ganga.GPIDev.Lib.File import FileUtils
from Ganga.GPIDev.Lib.Job.Job import Job
from Ganga.Utility.files import expandfilename
from Ganga.Core.exceptions import GangaFileError
from GangaDirac.Lib.Utilities.DiracUtilities import getDiracEnv, execute, GangaDiracError
import Ganga.Utility.Config
from Ganga.Runtime.GPIexport import exportToGPI
from Ganga.GPIDev.Credentials import require_credential
from GangaDirac.Lib.Credentials.DiracProxy import DiracProxy, DiracProxyInfo
from Ganga.Utility.Config import getConfig
from Ganga.Utility.logging import getLogger
from GangaDirac.Lib.Backends.DiracUtils import getAccessURLs
configDirac = getConfig('DIRAC')
logger = getLogger()
regex = re.compile('[*?\[\]]')

global stored_list_of_sites
stored_list_of_sites = []


class DiracFile(IGangaFile):
    """
    File stored on a DIRAC storage element

    Usage:

        Some common use cases:
Exemple #44
0
def getShell(middleware='EDG', force=False):
    """
    Utility function for getting Grid Shell.
    Caller should take responsiblity of credential checking if proxy is needed.

    Argumennts:

     middleware - grid m/w used 
     force      - False : if the shell already exists in local cache return the previous created instance
                  True  : recreate the shell and if not None update the cache
    """

    logger = getLogger()

    if not middleware:
        logger.debug('No middleware specified, assuming default EDG')
        middleware = 'EDG'

    if middleware in _allShells.keys() and not force:
        return _allShells[middleware]

    values = {}
    for key in ['X509_USER_PROXY', 'X509_CERT_DIR', 'X509_VOMS_DIR']:
        try:
            values[key] = os.environ[key]
        except KeyError:
            pass

    configname = ""
    if middleware == 'EDG' or middleware == 'GLITE':
        configname = 'LCG'
    else:
        configname = middleware

    config = None
    try:
        config = getConfig(configname)
    except:
        logger.warning(
            '[%s] configuration section not found. Cannot set up a proper grid shell.' % configname)
        return None

    s = None

    key = '%s_SETUP' % middleware

    # 1. check if the *_SETUP is changed by user -> take the user's value as session value
    # 2. else check if *_LOCATION is defined as env. variable -> do nothing (ie. create shell without any lcg setup)
    # 3. else take the default *_SETUP as session value

    MIDDLEWARE_LOCATION = '%s_LOCATION' % middleware

    if config.getEffectiveLevel(key) == 2 and MIDDLEWARE_LOCATION in os.environ:
        s = Shell()
    else:
        if os.path.exists(config[key]):
            # FIXME: Hardcoded rule for ARC middleware setup (pass explicitly
            # the $ARC_LOCATION as argument), this is hardcoded to maintain
            # backwards compatibility (and avoid any side effects) for EDG and
            # GLITE setup scripts which did not take any arguments
            if key.startswith('ARC') and MIDDLEWARE_LOCATION in os.environ:
                s = Shell(
                    config[key], setup_args=[os.environ[MIDDLEWARE_LOCATION]])
            else:
                s = Shell(config[key])
        else:
            logger.warning("Configuration of %s for %s: " %
                           (middleware, configname))
            logger.warning("File not found: %s" % config[key])

    if s:
        for key, val in values.items():
            s.env[key] = val

        # check and set env. variables for default LFC setup
        if 'LFC_HOST' not in s.env:
            try:
                s.env['LFC_HOST'] = config['DefaultLFC']
            except ConfigError:
                pass

        if 'LFC_CONNTIMEOUT' not in s.env:
            s.env['LFC_CONNTIMEOUT'] = '20'

        if 'LFC_CONRETRY' not in s.env:
            s.env['LFC_CONRETRY'] = '0'

        if 'LFC_CONRETRYINT' not in s.env:
            s.env['LFC_CONRETRYINT'] = '1'

        _allShells[middleware] = s

    return s
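
A usage sketch under the assumptions documented above: the caller is responsible for credential checking, and the cached shell is reused unless force=True. The 3-tuple returned by cmd1 matches the FakeShell mock shown in Exemple #55 below.

shell = getShell('GLITE')
if shell is not None:
    rc, output, m = shell.cmd1('voms-proxy-info -all')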
Exemple #45
0
from Ganga.Utility.Config import getConfig, ConfigError
from Ganga.Utility.root import getrootsys, getpythonhome

from Ganga.Core.exceptions import ApplicationPrepareError

import Ganga.Utility.logging
import inspect
import os
import sys
import tempfile
from Ganga.Utility.files import expandfilename

from Ganga.GPIDev.Base.Proxy import getName

logger = Ganga.Utility.logging.getLogger()
config = getConfig('ROOT')


def getDefaultScript():
    name = os.path.join(
        os.path.dirname(
            os.path.abspath(inspect.getfile(inspect.currentframe()))),
        'defaultRootScript.C')
    return File(name=name)


class Root(IPrepareApp):
    """
    Root application -- running ROOT

    To run a job in ROOT you need to specify the CINT script to be
Exemple #46
0
    def preparejob(self, jobconfig, master_input_sandbox):

        job = self.getJobObject()
        # print str(job.backend_output_postprocess)
        mon = job.getMonitoringService()
        import Ganga.Core.Sandbox as Sandbox
        from Ganga.GPIDev.Lib.File import File
        from Ganga.Core.Sandbox.WNSandbox import PYTHON_DIR
        import inspect

        fileutils = File(inspect.getsourcefile(Ganga.Utility.files),
                         subdir=PYTHON_DIR)

        sharedfiles = jobconfig.getSharedFiles()

        subjob_input_sandbox = job.createPackedInputSandbox(
            jobconfig.getSandboxFiles() + [fileutils])

        appscriptpath = [jobconfig.getExeString()] + jobconfig.getArgStrings()
        if self.nice:
            appscriptpath = ['nice', '-n %d' % self.nice] + appscriptpath
        if self.nice < 0:
            logger.warning(
                'increasing process priority is often not allowed, your job may fail due to this'
            )

        sharedoutputpath = job.getOutputWorkspace().getPath()
        ## FIXME DON'T just use the blind list here, request the list of files to be in the output from a method.
        outputpatterns = jobconfig.outputbox
        environment = dict() if jobconfig.env is None else jobconfig.env

        import tempfile
        workdir = tempfile.mkdtemp(dir=config['location'])

        import inspect
        script_location = os.path.join(
            os.path.dirname(
                os.path.abspath(inspect.getfile(inspect.currentframe()))),
            'LocalHostExec.py.template')

        from Ganga.GPIDev.Lib.File import FileUtils
        script = FileUtils.loadScript(script_location, '')

        script = script.replace('###INLINEMODULES###',
                                inspect.getsource(Sandbox.WNSandbox))

        from Ganga.GPIDev.Lib.File.OutputFileManager import getWNCodeForOutputSandbox, getWNCodeForOutputPostprocessing, getWNCodeForDownloadingInputFiles, getWNCodeForInputdataListCreation
        from Ganga.Utility.Config import getConfig
        jobidRepr = repr(job.getFQID('.'))

        script = script.replace(
            '###OUTPUTSANDBOXPOSTPROCESSING###',
            getWNCodeForOutputSandbox(job, ['stdout', 'stderr', '__syslog__'],
                                      jobidRepr))
        script = script.replace('###OUTPUTUPLOADSPOSTPROCESSING###',
                                getWNCodeForOutputPostprocessing(job, ''))
        script = script.replace('###DOWNLOADINPUTFILES###',
                                getWNCodeForDownloadingInputFiles(job, ''))
        script = script.replace('###CREATEINPUTDATALIST###',
                                getWNCodeForInputdataListCreation(job, ''))

        script = script.replace('###APPLICATION_NAME###',
                                repr(getName(job.application)))
        script = script.replace(
            '###INPUT_SANDBOX###',
            repr(subjob_input_sandbox + master_input_sandbox + sharedfiles))
        script = script.replace('###SHAREDOUTPUTPATH###',
                                repr(sharedoutputpath))
        script = script.replace('###APPSCRIPTPATH###', repr(appscriptpath))
        script = script.replace('###OUTPUTPATTERNS###', str(outputpatterns))
        script = script.replace('###JOBID###', jobidRepr)
        script = script.replace('###ENVIRONMENT###', repr(environment))
        script = script.replace('###WORKDIR###', repr(workdir))
        script = script.replace('###INPUT_DIR###',
                                repr(job.getStringInputDir()))

        self.workdir = workdir

        script = script.replace('###GANGADIR###',
                                repr(getConfig('System')['GANGA_PYTHONPATH']))

        wrkspace = job.getInputWorkspace()
        scriptPath = wrkspace.writefile(FileBuffer('__jobscript__', script),
                                        executable=1)

        return scriptPath
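
The method above is an instance of a simple pattern: load a template script and substitute ###TOKEN### placeholders with repr()'d Python values, so the generated job script embeds them as valid literals. In miniature:

template = "print(###MESSAGE###)\n"
script = template.replace('###MESSAGE###', repr('hello from the worker node'))
# script == "print('hello from the worker node')\n"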
Exemple #47
0
from Ganga.GPIDev.Adapters.IApplication import IApplication
from Ganga.GPIDev.Adapters.IPrepareApp import IPrepareApp
from Ganga.GPIDev.Adapters.IRuntimeHandler import IRuntimeHandler
from Ganga.GPIDev.Schema import *

from Ganga.Utility.Config import getConfig

from Ganga.GPIDev.Lib.File import *
from Ganga.GPIDev.Lib.Registry.PrepRegistry import ShareRef
from Ganga.GPIDev.Base.Proxy import isType
from Ganga.Core import ApplicationConfigurationError

import os, shutil, commands, re
from Ganga.Utility.files import expandfilename
shared_path = os.path.join(
    expandfilename(getConfig('Configuration')['gangadir']), 'shared',
    getConfig('Configuration')['user'])


class Highland(IApplication):
    """
    Highland application running any highland executables.

    The required input for this module are:
        app.exe = 'RunNumuCCAnalysis.exe'
        app.cmtsetup = '/home/me/myT2KWork/setup.sh'
        app.outputfile = 'myFantasticResults.root'

    NOTE: Ganga will run a bash script so the CMT setup script must be in sh/bash.

    It is also possible to pass command line arguments to the executable.
Exemple #48
0
    def updateMonitoringInformation(jobs):

        jobDict = {}
        for job in jobs:
            if job.backend.id:
                jobDict[job.backend.id] = job

        idList = jobDict.keys()

        if not idList:
            return

        queryCommand = " ".join\
            ([
                "condor_q -global" if getConfig(
                    "Condor")["query_global_queues"] else "condor_q",
                "-format \"%s \" GlobalJobId",
                "-format \"%s \" RemoteHost",
                "-format \"%d \" JobStatus",
                "-format \"%f\\n\" RemoteUserCpu"
            ])
        status, output = commands.getstatusoutput(queryCommand)
        if 0 != status:
            logger.error("Problem retrieving status for Condor jobs")
            return

        if ("All queues are empty" == output):
            infoList = []
        else:
            infoList = output.split("\n")

        allDict = {}
        for infoString in infoList:
            tmpList = infoString.split()
            id, host, status, cputime = ("", "", "", "")
            if 3 == len(tmpList):
                id, status, cputime = tmpList
            if 4 == len(tmpList):
                id, host, status, cputime = tmpList
            if id:
                allDict[id] = {}
                allDict[id]["status"] = Condor.statusDict[status]
                allDict[id]["cputime"] = cputime
                allDict[id]["host"] = host

        fg = Foreground()
        fx = Effects()
        status_colours = {'submitted': fg.orange,
                          'running': fg.green,
                          'completed': fg.blue}

        for id in idList:

            printStatus = False
            if jobDict[id].status == "killed":
                continue

            localId = id.split("#")[-1]
            globalId = id

            if globalId == localId:
                queryCommand = " ".join\
                    ([
                        "condor_q -global" if getConfig(
                            "Condor")["query_global_queues"] else "condor_q",
                        "-format \"%s\" GlobalJobId",
                        id
                    ])
                status, output = commands.getstatusoutput(queryCommand)
                if 0 == status:
                    globalId = output

            if globalId in allDict.keys():
                status = allDict[globalId]["status"]
                host = allDict[globalId]["host"]
                cputime = allDict[globalId]["cputime"]
                if status != jobDict[id].backend.status:
                    printStatus = True
                    stripProxy(jobDict[id])._getWriteAccess()
                    jobDict[id].backend.status = status
                    if jobDict[id].backend.status == "Running":
                        jobDict[id].updateStatus("running")

                if host:
                    if jobDict[id].backend.actualCE != host:
                        jobDict[id].backend.actualCE = host
                jobDict[id].backend.cputime = cputime
            else:
                jobDict[id].backend.status = ""
                outDir = jobDict[id].getOutputWorkspace().getPath()
                condorLogPath = "".join([outDir, "condorLog"])
                checkExit = True
                if os.path.isfile(condorLogPath):
                    checkExit = False
                    for line in open(condorLogPath):
                        if -1 != line.find("terminated"):
                            checkExit = True
                            break
                        if -1 != line.find("aborted"):
                            checkExit = True
                            break

                if checkExit:
                    printStatus = True
                    stdoutPath = "".join([outDir, "stdout"])
                    jobStatus = "failed"
                    if os.path.isfile(stdoutPath):
                        with open(stdoutPath) as stdout:
                            lineList = stdout.readlines()
                        try:
                            exitLine = lineList[-1]
                            exitCode = exitLine.strip().split()[-1]
                        except IndexError:
                            exitLine = ''
                            exitCode = '-1'  # keep as a string so the .isdigit() check below is safe

                        if exitCode.isdigit():
                            jobStatus = "completed"
                        else:
                            logger.error("Problem extracting exit code from job %s. Line found was '%s'." % (
                                jobDict[id].fqid, exitLine))

                    jobDict[id].updateStatus(jobStatus)

            if printStatus:
                if jobDict[id].backend.actualCE:
                    hostInfo = jobDict[id].backend.actualCE
                else:
                    hostInfo = "Condor"
                status = jobDict[id].status
                if status in status_colours:
                    colour = status_colours[status]
                else:
                    colour = fg.magenta
                if "submitted" == status:
                    preposition = "to"
                else:
                    preposition = "on"

                if jobDict[id].backend.status:
                    backendStatus = "".join\
                        ([" (", jobDict[id].backend.status, ") "])
                else:
                    backendStatus = ""

                logger.info(colour + 'Job %s %s%s %s %s - %s' + fx.normal,
                            jobDict[
                                id].fqid, status, backendStatus, preposition, hostInfo,
                            time.strftime('%c'))

        return None
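
In miniature, the condor_q output parsed above is one job per line in the order requested by the -format options (GlobalJobId, RemoteHost, JobStatus, RemoteUserCpu), with RemoteHost absent for jobs that are not yet running; hence the 3-versus-4 token cases:

line = "host#123.0#456 slot1@worker 2 42.0"
tmpList = line.split()
if 3 == len(tmpList):
    id, status, cputime = tmpList
elif 4 == len(tmpList):
    id, host, status, cputime = tmpList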
Exemple #49
0
    def report_inner(job=None, isJob=False, isTask=False):

        userInfoDirName = "userreport"
        tempDirName = "reportsRepository"
        # job relevant info
        jobSummaryFileName = "jobsummary.txt"
        jobFullPrintFileName = "jobfullprint.txt"
        repositoryPath = "repository/$usr/LocalXML/6.0/jobs/$thousandsNumxxx"
        # task relevant info
        taskSummaryFileName = "tasksummary.txt"
        taskFullPrintFileName = "taskfullprint.txt"
        tasksRepositoryPath = "repository/$usr/LocalXML/6.0/tasks/$thousandsNumxxx"
        # user's info
        environFileName = "environ.txt"
        userConfigFileName = "userconfig.txt"
        defaultConfigFileName = "gangarc.txt"
        ipythonHistoryFileName = "ipythonhistory.txt"
        gangaLogFileName = "gangalog.txt"
        jobsListFileName = "jobslist.txt"
        tasksListFileName = "taskslist.txt"
        from Ganga.Utility import Config
        uploadFileServer = Config.getConfig('Feedback')['uploadServer']
        #uploadFileServer= "http://gangamon.cern.ch/django/errorreports/"
        #uploadFileServer= "http://ganga-ai-02.cern.ch/django/errorreports/"
        #uploadFileServer= "http://127.0.0.1:8000/errorreports"

        def printDictionary(dictionary, file=sys.stdout):
            for k, v in dictionary.iteritems():
                print('%s: %s' % (k, v), file=file)

                if k == 'PYTHONPATH':
                    global PYTHON_PATH
                    PYTHON_PATH = v

        def extractFileObjects(fileName, targetDirectoryName):
            try:
                fileToRead = open(fileName, 'r')
                try:
                    fileText = fileToRead.read()
                    import re
                    pattern = "File\(name=\'(.+?)\'"
                    matches = re.findall(pattern, fileText)

                    for fileName in matches:
                        fileName = os.path.expanduser(fileName)
                        targetFileName = os.path.join(
                            targetDirectoryName, os.path.basename(fileName))
                        shutil.copyfile(fileName, targetFileName)

                finally:
                    fileToRead.close()
            # except IOError, OSError:
            except Exception as err:
                logger.debug("Err: %s" % str(err))
                writeErrorLog(str(sys.exc_info()[1]))

        def writeErrorLog(errorMessage):
            try:
                fileToWrite = open(errorLogPath, 'a')
                try:
                    fileToWrite.write(errorMessage)
                    fileToWrite.write("\n")
                except Exception as err:
                    logger.debug("Err: %s" % str(err))
                    raise
                finally:
                    fileToWrite.close()
            except Exception as err2:
                logger.debug("Err: %s" % str(err2))
                pass

        def writeStringToFile(fileName, stringToWrite):

            try:
                # uncomment this to try the error logger
                #fileName = '~/' + fileName
                fileToWrite = open(fileName, 'w')
                try:
                    fileToWrite.write(stringToWrite)
                except Exception as err:
                    logger.debug("Err: %s" % str(err))
                    raise err
                finally:
                    fileToWrite.close()
            # except IOError:
            except Exception as err:
                logger.debug("Err2: %s" % str(err))
                writeErrorLog(str(sys.exc_info()[1]))

        def renameDataFiles(directory):

            for fileName in os.listdir(directory):
                fullFileName = os.path.join(directory, fileName)
                if os.path.isfile(fullFileName):
                    if fileName == 'data':
                        os.rename(fullFileName, fullFileName + '.txt')
                else:
                    renameDataFiles(fullFileName)

        import shutil
        import tarfile
        import tempfile
        import os

        userHomeDir = os.getenv("HOME")
        tempDir = tempfile.mkdtemp()

        errorLogPath = os.path.join(tempDir, 'reportErrorLog.txt')

        fullPathTempDir = os.path.join(tempDir, tempDirName)
        fullLogDirName = ''
        # create temp dir and specific dir for the job/user

        try:
            if not os.path.exists(fullPathTempDir):
                os.mkdir(fullPathTempDir)

            import datetime
            now = datetime.datetime.now()
            userInfoDirName = userInfoDirName + \
                now.strftime("%Y-%m-%d-%H:%M:%S")
            fullLogDirName = os.path.join(fullPathTempDir, userInfoDirName)

            # if the report directory exists -> delete its content (we want the
            # latest version of the report)
            if os.path.exists(fullLogDirName):
                shutil.rmtree(fullLogDirName)

            os.mkdir(fullLogDirName)
        # except OSError:
        except Exception as err:
            logger.debug("Err: %s" % str(err))
            writeErrorLog(str(sys.exc_info()[1]))

        # import os.environ in a file
        fullEnvironFileName = os.path.join(fullLogDirName, environFileName)

        try:
            inputFile = open(fullEnvironFileName, 'w')
            try:
                printDictionary(os.environ, file=inputFile)

                print('OS VERSION : ' + platform.platform(), file=inputFile)

            finally:
                inputFile.close()
        # except IOError
        except Exception as err:
            logger.debug("Err: %s" % str(err))
            writeErrorLog(str(sys.exc_info()[1]))

        # import user config in a file
        userConfigFullFileName = os.path.join(
            fullLogDirName, userConfigFileName)

        try:
            inputFile = open(userConfigFullFileName, 'w')
            try:

                print("#GANGA_VERSION = %s" %
                      config.System.GANGA_VERSION, file=inputFile)

                global GANGA_VERSION
                GANGA_VERSION = config.System.GANGA_VERSION

                # this gets the default values
                # Ganga.GPIDev.Lib.Config.Config.print_config_file()

                # this should get the changed values
                for c in config:
                    print(config[c], file=inputFile)

            finally:
                inputFile.close()
        # except IOError does not catch the exception ???
        except Exception as err:
            logger.debug("Err: %s" % str(err))
            writeErrorLog(str(sys.exc_info()[1]))

        # write gangarc - default configuration
        defaultConfigFullFileName = os.path.join(
            fullLogDirName, defaultConfigFileName)

        try:
            outputFile = open(os.path.join(userHomeDir, '.gangarc'), 'r')

            try:
                writeStringToFile(defaultConfigFullFileName, outputFile.read())
            finally:
                outputFile.close()

        # except IOError does not catch the exception ???
        except Exception as err:
            logger.debug("Err: %s" % str(err))
            writeErrorLog(str(sys.exc_info()[1]))

        # import ipython history in a file
        try:
            ipythonFile = open(
                os.path.join(os.environ['IPYTHONDIR'], 'history'), 'r')

            try:
                lastIPythonCommands = ipythonFile.readlines()[-20:]
                writeStringToFile(os.path.join(
                    fullLogDirName, ipythonHistoryFileName), '\n'.join(lastIPythonCommands))
                #writeStringToFile(os.path.join(fullLogDirName, ipythonHistoryFileName), ipythonFile.read())
            finally:
                ipythonFile.close()
        # except IOError does not catch the exception ???
        except Exception as err:
            logger.debug("Err: %s" % str(err))
            writeErrorLog(str(sys.exc_info()[1]))

        # import gangalog in a file
        userLogFileLocation = config["Logging"]._logfile
        userLogFileLocation = os.path.expanduser(userLogFileLocation)

        try:
            gangaLogFile = open(userLogFileLocation, 'r')
            try:
                writeStringToFile(
                    os.path.join(fullLogDirName, gangaLogFileName), gangaLogFile.read())
            finally:
                gangaLogFile.close()
        # except IOError:
        except Exception as err:
            logger.debug("Err: %s" % str(err))
            writeErrorLog(str(sys.exc_info()[1]))

        # import the result of jobs command in the report
        jobsListFullFileName = os.path.join(fullLogDirName, jobsListFileName)

        try:
            outputFile = open(jobsListFullFileName, 'w')
            try:

                from Ganga.GPI import jobs
                print(jobs, file=outputFile)

            finally:
                outputFile.close()

        # except IOError does not catch the exception ???
        except Exception as err:
            logger.debug("Err: %s" % str(err))
            writeErrorLog(str(sys.exc_info()[1]))

        # import the result of tasks command in the report
        tasksListFullFileName = os.path.join(fullLogDirName, tasksListFileName)

        try:
            outputFile = open(tasksListFullFileName, 'w')
            try:

                from Ganga.GPI import tasks
                print(tasks, file=outputFile)

            finally:
                outputFile.close()

        # except IOError does not catch the exception ???
        except Exception as err:
            logger.debug("Err: %s" % str(err))
            writeErrorLog(str(sys.exc_info()[1]))

        # save it here because we will change fullLogDirName, but we want this
        # to be the archive and to be deleted
        folderToArchive = fullLogDirName

        # import job relevant info
        if (job is not None and isJob):

            global JOB_REPORT, APPLICATION_NAME, BACKEND_NAME

            JOB_REPORT = True
            APPLICATION_NAME = getName(job.application)
            BACKEND_NAME = getName(job.backend)

            # create job folder
            jobFolder = 'job_%s' % str(job.fqid)
            fullLogDirName = os.path.join(fullLogDirName, jobFolder)
            os.mkdir(fullLogDirName)

            # import job summary in a file
            fullJobSummaryFileName = os.path.join(
                fullLogDirName, jobSummaryFileName)
            writeStringToFile(fullJobSummaryFileName, str(job))

            # import job full print in a file
            fullJobPrintFileName = os.path.join(
                fullLogDirName, jobFullPrintFileName)

            try:
                inputFile = open(fullJobPrintFileName, 'w')
                try:
                    full_print(job, inputFile)
                finally:
                    inputFile.close()
            # except IOError, OSError:
            except Exception as err:
                logger.debug("Err: %s" % str(err))
                writeErrorLog(str(sys.exc_info()[1]))

            # extract file objects
            try:
                fileObjectsPath = os.path.join(fullLogDirName, 'fileobjects')
                os.mkdir(fileObjectsPath)
                extractFileObjects(fullJobSummaryFileName, fileObjectsPath)
            # except OSError:
            except Exception as err:
                logger.debug("Err: %s" % str(err))
                writeErrorLog(str(sys.exc_info()[1]))

            # copy dir of the job ->input/output and subjobs
            try:
                parentDir, currentDir = os.path.split(job.inputdir[:-1])
                workspaceDir = os.path.join(fullLogDirName, 'workspace')
                shutil.copytree(parentDir, workspaceDir)
            # except IOError, OSError
            except Exception as err:
                logger.debug("Err: %s" % str(err))
                writeErrorLog(str(sys.exc_info()[1]))

            # copy shared area of the job
            try:

                if hasattr(job.application, 'is_prepared'):
                    if job.application.is_prepared is not None and job.application.is_prepared is not True:
                        import os
                        from Ganga.Utility.Config import getConfig
                        from Ganga.Utility.files import expandfilename
                        shared_path = os.path.join(expandfilename(getConfig(
                            'Configuration')['gangadir']), 'shared', getConfig('Configuration')['user'])
                        shareddir = os.path.join(
                            shared_path, job.application.is_prepared.name)
                        if os.path.isdir(shareddir):

                            sharedAreaDir = os.path.join(
                                fullLogDirName, 'sharedarea')
                            shutil.copytree(shareddir, sharedAreaDir)
            # except IOError, OSError
            except Exception as err:
                logger.debug("Err: %s" % str(err))
                writeErrorLog(str(sys.exc_info()[1]))

            # copy repository job file
            try:
                indexFileName = str(job.id) + '.index'

                repositoryPath = repositoryPath.replace(
                    '$usr', os.getenv("USER"))

                # check if the job is subjob -> different way of forming the
                # path to the repository
                is_subjob = job.fqid.find('.') > -1

                if is_subjob:

                    jobid, subjobid = job.fqid.split(
                        '.')[0], job.fqid.split('.')[1]
                    repositoryPath = repositoryPath.replace(
                        '$thousandsNum', str(int(jobid) / 1000))
                    repositoryPath = os.path.join(repositoryPath, jobid)

                else:
                    repositoryPath = repositoryPath.replace(
                        '$thousandsNum', str(job.id / 1000))

                repositoryFullPath = os.path.join(
                    config.Configuration.gangadir, repositoryPath)
                indexFileSourcePath = os.path.join(
                    repositoryFullPath, indexFileName)
                repositoryFullPath = os.path.join(
                    repositoryFullPath, str(job.id))

                repositoryTargetPath = os.path.join(
                    fullLogDirName, 'repository', str(job.id))

                os.mkdir(os.path.join(fullLogDirName, 'repository'))

                shutil.copytree(repositoryFullPath, repositoryTargetPath)
                # data files are copied but can not be opened -> add .txt to
                # their file names
                renameDataFiles(repositoryTargetPath)

                if not is_subjob:
                    # copy .index file
                    indexFileTargetPath = os.path.join(
                        fullLogDirName, 'repository', indexFileName)
                    shutil.copyfile(indexFileSourcePath, indexFileTargetPath)

            # except OSError, IOError:
            except Exception as err:
                logger.debug("Err: %s" % str(err))
                writeErrorLog(str(sys.exc_info()[1]))

        # import task relevant info
        if (job is not None and isTask):
            # job is actually a task object
            task = job
            # create task folder
            taskFolder = 'task_%s' % str(task.id)
            fullLogDirName = os.path.join(fullLogDirName, taskFolder)
            os.mkdir(fullLogDirName)

            # import task summary in a file
            fullTaskSummaryFileName = os.path.join(
                fullLogDirName, taskSummaryFileName)
            writeStringToFile(fullTaskSummaryFileName, str(task))

            # import task full print in a file
            fullTaskPrintFileName = os.path.join(
                fullLogDirName, taskFullPrintFileName)

            try:
                inputFile = open(fullTaskPrintFileName, 'w')
                try:
                    full_print(task, inputFile)
                except Exception as err:
                    logger.debug("Err: %s" % str(err))
                    raise err
                finally:
                    inputFile.close()
            # except IOError, OSError:
            except Exception as err:
                logger.debug("Err2: %s" % str(err))
                writeErrorLog(str(sys.exc_info()[1]))

            # copy shared area of the task
            try:
                if len(task.transforms) > 0:
                    if hasattr(task.transforms[0], 'application') and hasattr(task.transforms[0].application, 'is_prepared'):
                        if task.transforms[0].application.is_prepared is not None and task.transforms[0].application.is_prepared is not True:
                            import os
                            from Ganga.Utility.Config import getConfig
                            from Ganga.Utility.files import expandfilename
                            shared_path = os.path.join(expandfilename(getConfig(
                                'Configuration')['gangadir']), 'shared', getConfig('Configuration')['user'])
                            shareddir = os.path.join(
                                shared_path, task.transforms[0].application.is_prepared.name)
                            if os.path.isdir(shareddir):

                                sharedAreaDir = os.path.join(
                                    fullLogDirName, 'sharedarea')
                                shutil.copytree(shareddir, sharedAreaDir)
            # except IOError, OSError
            except Exception as err:
                logger.debug("Err: %s" % str(err))
                writeErrorLog(str(sys.exc_info()[1]))

            # copy repository task file
            try:
                indexFileName = str(task.id) + '.index'

                tasksRepositoryPath = tasksRepositoryPath.replace(
                    '$usr', os.getenv("USER"))
                tasksRepositoryPath = tasksRepositoryPath.replace(
                    '$thousandsNum', str(task.id / 1000))

                repositoryFullPath = os.path.join(
                    config.Configuration.gangadir, tasksRepositoryPath)
                indexFileSourcePath = os.path.join(
                    repositoryFullPath, indexFileName)
                repositoryFullPath = os.path.join(
                    repositoryFullPath, str(task.id))

                repositoryTargetPath = os.path.join(
                    fullLogDirName, 'repository', str(task.id))

                os.mkdir(os.path.join(fullLogDirName, 'repository'))

                shutil.copytree(repositoryFullPath, repositoryTargetPath)
                # data files are copied but can not be opened -> add .txt to
                # their file names
                renameDataFiles(repositoryTargetPath)

                # copy .index file
                indexFileTargetPath = os.path.join(
                    fullLogDirName, 'repository', indexFileName)
                shutil.copyfile(indexFileSourcePath, indexFileTargetPath)

            # except OSError, IOError:
            except Exception as err:
                logger.debug("Err %s" % str(err))
                writeErrorLog(str(sys.exc_info()[1]))

        resultArchive = '%s.tar.gz' % folderToArchive

        try:
            resultFile = tarfile.TarFile.open(resultArchive, 'w:gz')
            try:
                resultFile.add(
                    folderToArchive, arcname=os.path.basename(folderToArchive))
                # put the error log in the archive
                if(os.path.exists(errorLogPath)):
                    resultFile.add(
                        errorLogPath, arcname=os.path.basename(errorLogPath))
            except Exception as err:
                logger.debug("Err: %s" % str(err))
                raise
            finally:
                resultFile.close()
        except Exception as err:
            logger.debug("Err2: %s" % str(err))
            raise  # pass

        # remove temp dir
        if(os.path.exists(folderToArchive)):
            shutil.rmtree(folderToArchive)

        # print the error if there is something
        if os.path.exists(errorLogPath):
            logger.error('')
            logger.error('An error occurred while collecting report information : ' + open(errorLogPath, 'r').read())
            logger.error('')

        # delete the errorfile from user's pc
        if(os.path.exists(errorLogPath)):
            os.remove(errorLogPath)

        # return the path to the archive and the path to the upload server
        return (resultArchive, uploadFileServer, tempDir)
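
A sketch of the return contract, with names taken from the code above: the caller uploads the archive to the server and removes the temporary directory once the upload is done.

resultArchive, uploadFileServer, tempDir = report_inner(job=None, isJob=False, isTask=False)
# resultArchive    : path to the .tar.gz report archive
# uploadFileServer : [Feedback]uploadServer from the configuration
# tempDir          : directory the caller should clean up after uploading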
Exemple #50
0
    def remove(self, force=False, removeLocal=False):
        """
        Removes file from remote storage ONLY by default
        """
        massStorageConfig = getConfig(
            'Output')['MassStorageFile']['uploadOptions']
        rm_cmd = massStorageConfig['rm_cmd']

        if force == True:
            _auto_delete = True
        else:
            _auto_delete = False

        for i in list(self.locations):  # iterate over a copy, since entries may be removed below

            if not _auto_delete:

                keyin = None

                while keyin is None:
                    keyin = raw_input(
                        "Do you want to delete file %s at Location: %s ? [y/n] "
                        % (str(self.namePattern), str(i)))
                    if keyin == 'y':
                        _delete_this = True
                    elif keyin == 'n':
                        _delete_this = False
                    else:
                        logger.warning("y/n please!")
                        keyin = None
            else:
                _delete_this = True

            if _delete_this:
                logger.info("Deleting File at Location: %s" % str(i))
                self.execSyscmdSubprocess('%s %s' % (rm_cmd, i))
                self.locations.remove(i)

        if removeLocal:

            sourceDir = ''
            if self.localDir == '':
                import os
                _CWD = os.getcwd()
                if os.path.isfile(os.path.join(_CWD, self.namePattern)):
                    sourceDir = _CWD
            else:
                sourceDir = self.localDir

            _localFile = os.path.join(sourceDir, self.namePattern)

            if os.path.isfile(_localFile):

                if force:
                    _actual_delete = True
                else:

                    keyin = None
                    while keyin is None:
                        keyin = raw_input(
                            "Do you want to remove the local File: %s ? ([y]/n) "
                            % str(_localFile))
                        if keyin in ['y', '']:
                            _actual_delete = True
                        elif keyin == 'n':
                            _actual_delete = False
                        else:
                            logger.warning("y/n please!")
                            keyin = None

                if _actual_delete:
                    import time
                    remove_filename = _localFile + "_" + str(
                        time.time()) + '__to_be_deleted_'

                    try:
                        os.rename(_localFile, remove_filename)
                    except OSError as err:
                        logger.warning(
                            "Error in first stage of removing file: %s" %
                            remove_filename)
                        remove_filename = _localFile

                    try:
                        os.remove(remove_filename)
                    except OSError as err:
                        if err.errno != errno.ENOENT:
                            logger.error("Error in removing file: %s" %
                                         str(remove_filename))
                            raise
                        pass
        return
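
The local delete above is deliberately two-staged; in miniature (a sketch with a hypothetical path, not the method itself):

import os
import time

local_file = '/tmp/example.data'  # hypothetical
remove_name = local_file + '_' + str(time.time()) + '__to_be_deleted_'
os.rename(local_file, remove_name)  # stage 1: file vanishes from its usual name
os.remove(remove_name)              # stage 2: the clearly-marked leftover is unlinked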
Exemple #51
0
from __future__ import print_function
from Ganga.Utility.Config import getConfig, setSessionValuesFromFiles

if __name__ == "__main__":

    import Ganga.Utility.logging

    Ganga.Utility.logging.config['Ganga.Utility.Config'] = 'DEBUG'
    Ganga.Utility.logging.bootstrap()

    print('Basic Test')
    import sys

    c1 = getConfig('C1')
    c1.setDefaultOptions({'a': 1, 'aa': 'xx'})

    c2 = getConfig('C2')
    c2['b'] = 2
    c2['bb'] = 'yy'

    print('path=', sys.path[0])

    setSessionValuesFromFiles([sys.path[0] + '/ConfigTest.ini'])

    print("C1")
    print(c1.getEffectiveOptions())
    print()
    print("C2")
    print(c2.getEffectiveOptions())

    assert(c1['a'] == 3)
Exemple #52
0
import time
import traceback
import sys
import Ganga.GPIDev.Lib.Registry.RegistrySlice
from Ganga.GPIDev.Lib.Registry.JobRegistry import JobRegistrySliceProxy
from Ganga.Core.GangaRepository.Registry import Registry, RegistryError, RegistryKeyError, RegistryAccessError, RegistryFlusher
from Ganga.GPIDev.Base.Proxy import stripProxy, getName, isType
from Ganga.Utility.ColourText import ANSIMarkup, overview_colours, status_colours, fgcol

from Ganga.Utility.logging import getLogger

logger = getLogger()

from Ganga.Utility.Config import getConfig

config = getConfig('Tasks')

markup = ANSIMarkup()
str_run = markup("run", overview_colours["running"])
str_fail = markup("fail", overview_colours["failed"])
str_hold = markup("hold", overview_colours["hold"])
str_bad = markup("bad", overview_colours["bad"])


class TaskRegistry(Registry):
    def __init__(self, name, doc):

        super(TaskRegistry, self).__init__(name, doc)

        self._main_thread = None
Exemple #53
0
    except Exception as err:
        logger.error("problems with loading plugins for %s -- ignored" %
                     r.name)
        logger.error('Reason: %s' % str(err))

# ------------------------------------------------------------------------------------
# make all plugins visible in GPI
for k in allPlugins.allCategories():
    for n in allPlugins.allClasses(k):
        cls = allPlugins.find(k, n)
        if not cls._declared_property('hidden'):
            exportToPublicInterface(n, cls._proxyClass, 'Classes')

# ------------------------------------------------------------------------------------
# set the default value for the plugins
default_plugins_cfg = getConfig("Plugins")

for opt in default_plugins_cfg:
    try:
        category, tag = opt.split('_')
    except ValueError as err:
        logger.warning("do not understand option %s in [Plugins]", opt)
        logger.debug('Reason: want %s' % str(err))
    else:
        if tag == 'default':
            try:
                allPlugins.setDefault(category, default_plugins_cfg[opt])
            except Ganga.Utility.Plugin.PluginManagerError as x:
                logger.warning('cannot set the default plugin "%s": %s' %
                               (opt, x))
        else:
Exemple #54
0
    def _parse_options(self):
        try:
            parser = self._get_parser()
        except ApplicationConfigurationError as err:
            logger.debug("_get_parser Error:\n%s" % str(err))
            raise err

        share_dir = os.path.join(
            expandfilename(getConfig('Configuration')['gangadir']), 'shared',
            getConfig('Configuration')['user'], self.is_prepared.name)
        # Need to remember to create the buffer, as the prepare method's returns
        # are merely copied to the inputsandbox and so must already exist.
        #   share_path = os.path.join(share_dir,'inputsandbox')
        #   if not os.path.isdir(share_path): os.makedirs(share_path)

        fillPackedSandbox([FileBuffer('options.pkl', parser.opts_pkl_str)],
                          os.path.join(
                              share_dir, 'inputsandbox',
                              '_input_sandbox_%s.tar' % self.is_prepared.name))
        # FileBuffer(os.path.join(share_path,'options.pkl'),
        # parser.opts_pkl_str).create()
        # self.prep_inputbox.append(File(os.path.join(share_dir,'options.pkl')))

        # Check in any input datasets defined in optsfiles and allow them to be
        # read into the
        inputdata = parser.get_input_data()
        if len(inputdata.files) > 0:
            logger.warning(
                'Found inputdataset defined in optsfile; '
                'this will get pickled and stored in the '
                'prepared state. Any change to the options/data will '
                'therefore require an unprepare first.')
            logger.warning(
                'NOTE: the preferred way of working '
                'is to define inputdata in the job.inputdata field. ')
            logger.warning(
                'Data defined in job.inputdata will supersede optsfile data!')
            logger.warning(
                'Inputdata can be transferred from optsfiles to the job.inputdata field '
                'using job.inputdata = job.application.readInputData(optsfiles)'
            )
            share_path = os.path.join(share_dir, 'inputdata')
            if not os.path.isdir(share_path):
                os.makedirs(share_path)
            f = open(os.path.join(share_path, 'options_data.pkl'), 'w+b')
            pickle.dump(inputdata, f)
            f.close()

        # store the outputsandbox/outputdata defined in the options file.
        # Can remove this once outputdata no longer needs to be defined in optsfiles.
        # Can remove the "if job:" once prepare for a standalone app is worked out.
        # Move into the RuntimeHandler? Move the whole parsing into options, maybe?

        # try and get the job object
        # (not present if preparing a standalone app)

        # must change this, as prepare should be separate from the job.inputdata

        share_path = os.path.join(share_dir, 'output')
        if not os.path.isdir(share_path):
            os.makedirs(share_path)
        f = open(os.path.join(share_path, 'options_parser.pkl'), 'w+b')
        pickle.dump(parser, f)
        f.close()
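
A counterpart sketch for reading the pickles written above back out of the shared area (file names taken from the code; share_dir as computed at the top of the method):

import os
import pickle

with open(os.path.join(share_dir, 'output', 'options_parser.pkl'), 'rb') as f:
    parser = pickle.load(f)
with open(os.path.join(share_dir, 'inputdata', 'options_data.pkl'), 'rb') as f:
    inputdata = pickle.load(f)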
Exemple #55
0
import pytest
try:
    import unittest.mock as mock
except ImportError:
    import mock

from Ganga.Utility.Config import getConfig, makeConfig
makeConfig('defaults_DiracProxy', '')
getConfig('defaults_DiracProxy').addOption('group', 'some_group', '')
from GangaDirac.Lib.Credentials.DiracProxy import DiracProxy, DiracProxyInfo
getConfig('defaults_DiracProxy').setSessionValue('group', 'some_group')


class FakeShell(object):
    """
    A mock version of Shell which allows the return values to be customised

    Examples:
        >>> s = FakeShell()
        >>> assert s.cmd1.call_count == 0
        >>> assert s.cmd1('foo') == (0, '', '')
        >>> assert s.cmd1.call_count == 1
        >>> assert s.cmd1('something -vo') == (0, 'some_group', '')
        >>> assert s.cmd1.call_count == 2
    """
    vo = 'some_group'
    timeleft = 100

    def __init__(self):
        self.env = {}
        self.cmd1 = mock.Mock(wraps=self._cmd1)
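    # A minimal sketch of the wrapped helper, inferred from the doctest above;
    # the real _cmd1 is truncated out of this snippet, so this is an assumption.
    def _cmd1(self, cmd):
        # mimic a shell call: report the vo for '-vo' queries, else empty output
        if '-vo' in cmd:
            return 0, self.vo, ''
        return 0, '', ''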
Exemple #56
0
    def prepare(self, app, appsubconfig, appmasterconfig, jobmasterconfig):
        """
        Prepare the RTHandler in order to submit to the Dirac backend
        Args:
            app (GaudiExec): This application is only expected to handle GaudiExec Applications here
            appsubconfig (unknown): Output passed from the application configuration call
            appmasterconfig (unknown): Output passed from the application master_configure call
            jobmasterconfig (tuple): Output from the master job prepare step
        """
        cred_req = app.getJobObject().backend.credential_requirements
        check_creds(cred_req)

        # NB this needs to be removed safely
        # Get the inputdata and input/output sandbox in a sorted way
        inputsandbox, outputsandbox = sandbox_prepare(app, appsubconfig,
                                                      appmasterconfig,
                                                      jobmasterconfig)
        input_data, parametricinput_data = dirac_inputdata(app)

        # We know we don't need this one
        inputsandbox = []

        job = app.getJobObject()

        # We can support inputfiles and opts_file here. Local files should be submitted once; remote files can simply be referenced.

        all_opts_files = app.getOptsFiles()

        for opts_file in all_opts_files:
            if isinstance(opts_file, DiracFile):
                inputsandbox += ['LFN:' + opts_file.lfn]

        # Sort out inputfiles we support
        for file_ in job.inputfiles:
            if isinstance(file_, DiracFile):
                inputsandbox += ['LFN:' + file_.lfn]
            elif isinstance(file_, LocalFile):
                if job.master is not None and file_ not in job.master.inputfiles:
                    shutil.copy(
                        os.path.join(file_.localDir, file_.namePattern),
                        app.getSharedPath())
                    inputsandbox += [
                        os.path.join(app.getSharedPath(), file_.namePattern)
                    ]
            else:
                logger.error(
                    "Filetype: %s nor currently supported, please contact Ganga Devs if you require support for this with the DIRAC backend"
                    % getName(file_))
                raise ApplicationConfigurationError(
                    "Unsupported filetype: %s with DIRAC backend" %
                    getName(file_))

        master_job = job.master or job

        app.uploadedInput = master_job.application.uploadedInput
        app.jobScriptArchive = master_job.application.jobScriptArchive

        logger.debug("uploadedInput: %s" % app.uploadedInput)

        rep_data = app.uploadedInput.getReplicas()

        logger.debug("Replica info: %s" % rep_data)

        inputsandbox += ['LFN:' + app.uploadedInput.lfn]
        inputsandbox += ['LFN:' + app.jobScriptArchive.lfn]

        logger.debug("Input Sand: %s" % inputsandbox)

        logger.debug("input_data: %s" % input_data)

        outputfiles = [
            this_file for this_file in job.outputfiles
            if isinstance(this_file, DiracFile)
        ]

        scriptToRun = getScriptName(app)
        # Already added to the sandbox and uploaded as an LFN

        # This code deals with the outputfiles as outputsandbox and outputdata for us
        lhcbdirac_outputfiles = lhcbdirac_outputfile_jdl(outputfiles)

        # NOTE special case for replicas: replicate string must be empty for no
        # replication
        dirac_script = script_generator(
            lhcbdiracAPI_script_template(),
            DIRAC_IMPORT='from LHCbDIRAC.Interfaces.API.DiracLHCb import DiracLHCb',
            DIRAC_JOB_IMPORT='from LHCbDIRAC.Interfaces.API.LHCbJob import LHCbJob',
            DIRAC_OBJECT='DiracLHCb()',
            JOB_OBJECT='LHCbJob()',
            NAME=mangle_job_name(app),
            EXE=os.path.join('jobScript', scriptToRun),
            EXE_ARG_STR='',
            EXE_LOG_FILE='Ganga_GaudiExec.log',
            ENVIRONMENT=None,  # app.env,
            INPUTDATA=input_data,
            PARAMETRIC_INPUTDATA=parametricinput_data,
            OUTPUT_SANDBOX=API_nullifier(outputsandbox),
            OUTPUTFILESSCRIPT=lhcbdirac_outputfiles,
            OUTPUT_PATH="",  # job.fqid,
            OUTPUT_SE=[],
            PLATFORM=app.platform,
            SETTINGS=diracAPI_script_settings(app),
            DIRAC_OPTS=job.backend.diracOpts,
            REPLICATE='True'
            if getConfig('DIRAC')['ReplicateOutputData'] else '',
            # leave the sandbox for altering later, as this needs to be done
            # in backend.submit to combine with the master sandbox.
            # Note: only using 2 '#'s as the template processing auto-removes 3
            INPUT_SANDBOX=repr(list(inputsandbox)),
        )

        # NB: inputsandbox here isn't used by the DIRAC backend as we explicitly define INPUT_SANDBOX above!

        # Return the output needed for the backend to submit this job
        return StandardJobConfig(dirac_script, inputbox=[], outputbox=[])
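A toy sketch of the placeholder substitution script_generator is assumed to perform, with placeholders of the form ###KEY### (consistent with the "2 #s" comment above, but still an assumption about the real template syntax):

def toy_script_generator(template, **substitutions):
    # replace each ###KEY### placeholder with the string form of its value
    for key, value in substitutions.items():
        template = template.replace('###%s###' % key.upper(), str(value))
    return template

# e.g. toy_script_generator('name = "###NAME###"', NAME='my_job') -> 'name = "my_job"'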
Exemple #57
0
    def __setattr__(self, attr, value):

        if attr == 'outputfiles':

            if value != []:
                if self.outputdata is not None:
                    logger.error(
                        'ITransform.outputdata is set, you can\'t set ITransform.outputfiles')
                    return
                elif self.outputsandbox != []:
                    logger.error(
                        'ITransform.outputsandbox is set, you can\'t set ITransform.outputfiles')
                    return

            # reduce duplicate values here; keep duplicates only for LCGSEFile,
            # where we can have replicas
            seen_keys = []
            uniqueValues = []

            for val in value:
                key = '%s%s' % (getName(val), val.namePattern)
                if key not in seen_keys:
                    seen_keys.append(key)
                    uniqueValues.append(val)
                elif getName(val) == 'LCGSEFile':
                    uniqueValues.append(val)

            super(ITransform, self).__setattr__(attr, uniqueValues)

        elif attr == 'inputfiles':

            if value != []:
                if self.inputsandbox != []:
                    logger.error(
                        'ITransform.inputsandbox is set, you can\'t set ITransform.inputfiles')
                    return

            super(ITransform, self).__setattr__(attr, value)

        elif attr == 'outputsandbox':

            if value != []:

                if getConfig('Output')['ForbidLegacyOutput']:
                    logger.error(
                        'Use of ITransform.outputsandbox is forbidden, please use ITransform.outputfiles')
                    return

                if self.outputfiles != []:
                    logger.error(
                        'ITransform.outputfiles is set, you can\'t set ITransform.outputsandbox')
                    return

            super(ITransform, self).__setattr__(attr, value)

        elif attr == 'inputsandbox':

            if value != []:

                if getConfig('Output')['ForbidLegacyInput']:
                    logger.error(
                        'Use of ITransform.inputsandbox is forbidden, please use ITransform.inputfiles')
                    return

                if self.inputfiles != []:
                    logger.error(
                        'ITransform.inputfiles is set, you can\'t set ITransform.inputsandbox')
                    return

            super(ITransform, self).__setattr__(attr, value)

        elif attr == 'outputdata':

            if value is not None:

                if getConfig('Output')['ForbidLegacyOutput']:
                    logger.error(
                        'Use of ITransform.outputdata is forbidden, please use ITransform.outputfiles')
                    return

                if self.outputfiles != []:
                    logger.error(
                        'ITransform.outputfiles is set, you can\'t set ITransform.outputdata')
                    return
            super(ITransform, self).__setattr__(attr, value)

        else:
            super(ITransform, self).__setattr__(attr, value)
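A standalone sketch of the outputfiles deduplication rule above: entries are keyed by type name plus namePattern, and duplicates are kept only for LCGSEFile, where replicas are allowed. The helper and the get_name default are illustrative stand-ins, not Ganga API:

def dedup_outputfiles(values, get_name=lambda v: type(v).__name__):
    seen = set()
    unique = []
    for val in values:
        key = '%s%s' % (get_name(val), val.namePattern)
        if key not in seen:
            seen.add(key)
            unique.append(val)
        elif get_name(val) == 'LCGSEFile':
            # replicas allowed: keep the duplicate for LCGSEFile only
            unique.append(val)
    return unique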
Exemple #58
0
    def _internal_job_finalisation(job, updated_dirac_status):

        logger = getLogger()

        if updated_dirac_status == 'completed':
            # firstly update job to completing
            DiracBase._getStateTime(job, 'completing')
            if job.status in ['removed', 'killed']:
                return
            if (job.master and job.master.status in ['removed', 'killed']):
                return  # user changed it under us

            job.updateStatus('completing')
            if job.master:
                job.master.updateMasterJobStatus()

            import time
            start = time.time()
            # contact dirac for information
            job.backend.normCPUTime = execute('normCPUTime(%d)' % job.backend.id)
            getSandboxResult = execute("getOutputSandbox(%d,'%s')" % (job.backend.id, job.getOutputWorkspace().getPath()))
            file_info_dict = execute('getOutputDataInfo(%d)' % job.backend.id)
            now = time.time()
            logger.debug('Job ' + job.fqid + ' Time for Dirac metadata : ' + str(now - start))

            logger.debug('Job ' + job.fqid + ' OutputDataInfo: ' + str(file_info_dict))
            logger.debug('Job ' + job.fqid + ' OutputSandbox: ' + str(getSandboxResult))

            # Set DiracFile metadata
            wildcards = [f.namePattern for f in job.outputfiles.get(DiracFile) if regex.search(f.namePattern) is not None]

            with open(os.path.join(job.getOutputWorkspace().getPath(), getConfig('Output')['PostProcessLocationsFileName']), 'ab') as postprocesslocationsfile:
                if not hasattr(file_info_dict, 'keys'):
                    logger.error("Error understanding OutputDataInfo: %s" % str(file_info_dict))
                    from Ganga.Core.exceptions import GangaException
                    raise GangaException("Error understanding OutputDataInfo: %s" % str(file_info_dict))

                for file_name in file_info_dict.get('Value', []):
                    file_name = os.path.basename(file_name)
                    info = file_info_dict.get(file_name)
                    logger.debug("file_name: %s,\tinfo: %s" % (str(file_name), str(info)))

                    valid_wildcards = [wc for wc in wildcards if fnmatch.fnmatch(file_name, wc)]
                    if len(valid_wildcards) == 0:
                        valid_wildcards.append('')

                    if not hasattr(info, 'get'):
                        logger.error("Error getting OutputDataInfo for: %s" % str(job.getFQID('.')))
                        logger.error("Please check the Dirac Job still exists or attempt a job.backend.reset() to try again!")
                        logger.error("Err: %s" % str(info))
                        logger.error("file_info_dict: %s" % str(file_info_dict))
                        from Ganga.Core.exceptions import GangaException
                        raise GangaException("Error getting OutputDataInfo")

                    for wc in valid_wildcards:
                        logger.debug("wildcard: %s" % str(wc))

                        DiracFileData = 'DiracFile:::%s&&%s->%s:::%s:::%s\n' % (wc,
                                                                                file_name,
                                                                                info.get('LFN', 'Error Getting LFN!'),
                                                                                str(info.get('LOCATIONS', ['NotAvailable'])),
                                                                                info.get('GUID', 'NotAvailable')
                                                                                )
                        logger.debug("DiracFileData: %s" % str(DiracFileData))
                        postprocesslocationsfile.write(DiracFileData)

            # check outputsandbox downloaded correctly
            if not result_ok(getSandboxResult):
                logger.warning('Problem retrieving outputsandbox: %s' % str(getSandboxResult))
                DiracBase._getStateTime(job, 'failed')
                if job.status in ['removed', 'killed']:
                    return
                if (job.master and job.master.status in ['removed', 'killed']):
                    return  # user changed it under us
                job.updateStatus('failed')
                if job.master:
                    job.master.updateMasterJobStatus()
                raise BackendError('Problem retrieving outputsandbox: %s' % str(getSandboxResult))

            # finally update job to completed
            DiracBase._getStateTime(job, 'completed')
            if job.status in ['removed', 'killed']:
                return
            if (job.master and job.master.status in ['removed', 'killed']):
                return  # user changed it under us
            job.updateStatus('completed')
            if job.master:
                job.master.updateMasterJobStatus()
            now = time.time()
            logger.debug('Job ' + job.fqid + ' Time for complete update : ' + str(now - start))

        elif updated_dirac_status == 'failed':
            # firstly update status to failed
            DiracBase._getStateTime(job, 'failed')
            if job.status in ['removed', 'killed']:
                return
            if (job.master and job.master.status in ['removed', 'killed']):
                return  # user changed it under us
            job.updateStatus('failed')
            if job.master:
                job.master.updateMasterJobStatus()

            # if requested try downloading outputsandbox anyway
            if getConfig('DIRAC')['failed_sandbox_download']:
                execute("getOutputSandbox(%d,'%s')" %
                        (job.backend.id, job.getOutputWorkspace().getPath()))
        else:
            logger.error("Unexpected dirac status '%s' encountered" % updated_dirac_status)
Exemple #59
0
from .GaudiPython import GaudiPython

from .Bender import Bender
from .BenderScript import BenderScript
from .Ostap import Ostap

from .BenderBox import BenderModule, BenderRun, OstapRun
from .GaudiExec import GaudiExec

# Add any additional Packages required by the user in the .gangarc file
from Ganga.Utility.Config import getConfig

logger = getLogger()
logger.debug("User Added Apps")

config = getConfig('LHCb')
user_added = config['UserAddedApplications']
user_apps = user_added.split(':')
# user_apps is always a list here (str.split never returns a plain string),
# so each non-empty entry is registered individually
for app in user_apps:
    if len(app) > 0:
        AppsBaseUtils.addNewLHCbapp(app)

logger.debug("Constructing AppsBase Apps")
with open(os.path.join(os.path.dirname(__file__), 'AppsBase.py'), 'r') as f:
    cls = f.read()
all_apps = ''
for app in AppsBaseUtils.available_apps():
    if app in dir():
Exemple #60
0
def DiracSplitter(inputs, filesPerJob, maxFiles, ignoremissing):
    """
    Generator that yields datasets for DIRAC split jobs
    """
    #logger.debug( "DiracSplitter" )
    #logger.debug( "inputs: %s" % str( inputs ) )
    split_files = []
    i = inputs.__class__()  # an empty dataset of the same type as the input

    if len(inputs.getLFNs()) != len(inputs.files):
        raise SplittingError(
            "Error trying to split dataset using DIRAC backend with non-DiracFile in the inputdata"
        )

    all_files = igroup(inputs.files[:maxFiles],
                       getConfig('DIRAC')['splitFilesChunks'],
                       leftovers=True)

    #logger.debug( "Looping over all_files" )
    #logger.debug( "%s" % str( all_files ) )

    for files in all_files:

        i.files = files

        LFNsToSplit = i.getLFNs()

        if len(LFNsToSplit) > 1:

            result = execute('splitInputData(%s, %d)' %
                             (i.getLFNs(), filesPerJob))

            if not result_ok(result):
                logger.error('DIRAC:: Error splitting files: %s' % str(result))
                raise SplittingError('Error splitting files.')

            split_files += result.get('Value', [])

        else:

            split_files.append(LFNsToSplit)

    if len(split_files) == 0:
        raise SplittingError('An unknown error occurred.')

    # FIXME
    # check that all files were available on the grid
    big_list = []
    for sublist in split_files:
        big_list.extend(sublist)
    diff = set(inputs.getFileNames()[:maxFiles]).difference(big_list)
    if len(diff) > 0:
        for f in diff:
            logger.warning('Ignored file: %s' % f)
        if not ignoremissing:
            raise SplittingError('Some files not found!')
    ###

    logger.debug("Split Files: %s" % str(split_files))

    for _dataset in split_files:
        dataset = []
        for _lfn in _dataset:
            dataset.append(DiracFile(lfn=_lfn))
        yield dataset
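A hypothetical usage sketch (the 'data' dataset name is assumed): each yielded dataset is a list of DiracFile objects that a splitter can attach to a subjob's inputdata:

for idx, subdataset in enumerate(
        DiracSplitter(data, filesPerJob=10, maxFiles=1000, ignoremissing=False)):
    print('subjob %d gets %d files' % (idx, len(subdataset)))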