Example #1
    def select(self, minid=None, maxid=None, **attrs):
        import repr
        from Ganga.GPIDev.Lib.Job.Job import Job

        if isType(minid, Job):
            if minid.master:
                minid = minid.master.id
            else:
                minid = minid.id
            if maxid is None:
                maxid = minid

        if isType(maxid, Job):
            if maxid.master:
                maxid = maxid.master.id
            else:
                maxid = maxid.id

        logger = getLogger()

        this_repr = repr.Repr()
        from Ganga.GPIDev.Base.Proxy import GPIProxyObjectFactory
        attrs_str = ""
        ## Loop through all possible input combinations to construct a string representation of the attrs from possible inputs
        ## Required to flatten the additional arguments into a flat string in attrs_str
        for a in attrs:
            from inspect import isclass
            if isclass(attrs[a]):
                this_attr = GPIProxyObjectFactory(attrs[a]())
            else:
                from Ganga.GPIDev.Base.Objects import GangaObject
                if isType(attrs[a], GangaObject):
                    this_attr = GPIProxyObjectFactory(attrs[a])
                else:
                    if type(attrs[a]) is str:
                        from Ganga.GPIDev.Base.Proxy import getRuntimeGPIObject
                        this_attr = getRuntimeGPIObject(attrs[a], True)
                    else:
                        this_attr = attrs[a]
            full_str = str(this_attr)
            split_str = full_str.split('\n')
            flat_str = ''.join(line.strip() for line in split_str)
            attrs_str += ", %s=\"%s\"" % (str(a), flat_str)

        logger.debug("Attrs_Str: %s" % str(attrs_str))
        logger.debug("Constructing slice: %s" %
                     str("%s.select(minid='%s', maxid='%s'%s)" %
                         (self.name, this_repr.repr(minid),
                          this_repr.repr(maxid), attrs_str)))
        this_slice = self.__class__("%s.select(minid='%s', maxid='%s'%s)" %
                                    (self.name, this_repr.repr(minid),
                                     this_repr.repr(maxid), attrs_str))

        def append(id, obj):
            this_slice.objects[id] = obj

        self.do_select(append, minid, maxid, **attrs)
        return this_slice
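A minimal usage sketch of this select() from the user side, assuming an interactive Ganga session where jobs is the registry slice exposing select(); the attribute value 'completed' is illustrative:

    # hedged sketch: select jobs by id range plus an attribute filter
    completed = jobs.select(minid=0, maxid=10, status='completed')
    for j in completed:
        print("%s %s" % (j.id, j.status))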
Example #2
 def mass_line_processor(line, mass_file):
     lineParts = line.split(' ')
     pattern = lineParts[1]
     outputPath = lineParts[2]
     name = os.path.basename(outputPath).strip('.gz')
     if regex.search(mass_file.namePattern) is not None:
         if outputPath == 'ERROR':
             logger.error("Failed to upload file to mass storage")
             logger.error(line[line.find('ERROR') + 5:])
             d = MassStorageFile(namePattern=pattern)
             d.compressed = mass_file.compressed
             d.failureReason = line[line.find('ERROR') + 5:]
             mass_file.subfiles.append(GPIProxyObjectFactory(d))
         else:
             d = MassStorageFile(namePattern=name)
             d.compressed = mass_file.compressed
             d.outputfilenameformat = mass_file.outputfilenameformat
             mass_file.subfiles.append(GPIProxyObjectFactory(d))
             mass_line_processor(line, d)
     elif name == mass_file.namePattern:
         if outputPath == 'ERROR':
             logger.error("Failed to upload file to mass storage")
             logger.error(line[line.find('ERROR') + 5:])
             mass_file.failureReason = line[line.find('ERROR') + 5:]
             return
         mass_file.locations = [outputPath.strip('\n')]
Example #3
 def __getitem__(self, i):
     '''Provides scripting (e.g. ds[2] returns the 3rd file) '''
     if isinstance(i, type(slice(0))):
         ds = GangaDataset(files=self.files[i])
         return GPIProxyObjectFactory(ds)
     else:
         return GPIProxyObjectFactory(self.files[i])
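A short usage sketch of the indexing behaviour above, assuming ds is a GangaDataset proxy populated elsewhere: an integer index returns one proxied file, a slice returns a new proxied dataset.

    third_file = ds[2]       # single file object wrapped by GPIProxyObjectFactory
    first_three = ds[0:3]    # new GangaDataset proxy holding the first three files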
Example #4
 def listShareDirContents(self, shared_directory_name):
     """
     Function which is used to list the contents of the prepared sandbox folder managed by this app
     Args:
         shared_directory_name (str): full name of directory managed by this app
     """
     shareref = GPIProxyObjectFactory(getRegistry("prep").getShareRef())
     shareref.ls(shared_directory_name)
Example #5
 def listShareDirContents(self, shared_directory_name):
     """
     Function which is used to list the contents of the prepared sandbox folder managed by this app
     Args:
         shared_directory_name (str): full name of directory managed by this app
     """
     shareref = GPIProxyObjectFactory(getRegistry("prep").getShareRef())
     shareref.ls(shared_directory_name)
Example #6
 def __getitem__(self, i):
     '''Provides scripting (e.g. ds[2] returns the 3rd file) '''
     if isinstance(i, slice):
         ds = BesDataset(files=self.files[i])
         ds.depth = self.depth
         ds.XMLCatalogueSlice = self.XMLCatalogueSlice
         return GPIProxyObjectFactory(ds)
     else:
         return GPIProxyObjectFactory(self.files[i])
Example #7
    def __incrementShareRef(obj, attr_name):
        shared_dir = getattr(obj, attr_name)

        if hasattr(shared_dir, 'name'):

            from Ganga.Core.GangaRepository import getRegistry
            from Ganga.GPIDev.Base.Proxy import GPIProxyObjectFactory
            shareref = GPIProxyObjectFactory(getRegistry("prep").getShareRef())

            logger.debug("Increasing shareref")
            shareref.increase(shared_dir.name)
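The pattern above recurs throughout these examples: fetch the ShareRef object from the "prep" registry, wrap it with GPIProxyObjectFactory, and adjust the reference count of a shared directory. A minimal standalone sketch of that pattern, assuming a configured Ganga environment (the helper name adjust_share_counter is illustrative, not part of Ganga):

    from Ganga.Core.GangaRepository import getRegistry
    from Ganga.GPIDev.Base.Proxy import GPIProxyObjectFactory

    def adjust_share_counter(shared_directory_name, delta=1):
        """Illustrative helper: raise or lower the shared-directory reference count."""
        shareref = GPIProxyObjectFactory(getRegistry("prep").getShareRef())
        if delta >= 0:
            shareref.increase(shared_directory_name)
        else:
            shareref.decrease(shared_directory_name)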
Example #8
 def decrementShareCounter(self, shared_directory_name, remove=0):
     """
     Function which is used to decrement the number of (sub)jobs which share the prepared sandbox
     managed by this app
     Args:
         shared_directory_name (str): full name of directory managed by this app
     """
     remove = remove
     logger.debug('Decrementing shared directory reference counter')
     shareref = GPIProxyObjectFactory(getRegistry("prep").getShareRef())
     shareref.decrease(shared_directory_name, remove)
Example #9
 def decrementShareCounter(self, shared_directory_name, remove=0):
     """
     Function which is used to decrement the number of (sub)jobs which share the prepared sandbox
     managed by this app
     Args:
         shared_directory_name (str): full name of directory managed by this app
     """
     remove = remove
     logger.debug('Decrementing shared directory reference counter')
     shareref = GPIProxyObjectFactory(getRegistry("prep").getShareRef())
     shareref.decrease(shared_directory_name, remove)
Example #10
 def union(self, other):
     '''Returns a new data set w/ files from this and other.'''
     files = set(self.getFullFileNames()).union(other.getFullFileNames())
     data = BesDataset()
     data.__construct__([list(files)])
     data.depth = self.depth
     return GPIProxyObjectFactory(data)
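A brief usage sketch of the set-style dataset operations (union here; difference, intersection and symmetricDifference appear in later examples), assuming ds1 and ds2 are BesDataset proxies built elsewhere:

    combined = ds1.union(ds2)           # files in either dataset
    only_in_ds1 = ds1.difference(ds2)   # files in ds1 but not in ds2
    shared = ds1.intersection(ds2)      # files common to both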
Example #11
File: Objects.py, Project: wvengen/lgipilot
 def __deepcopy__(self, memo = None):
     self._getReadAccess()
     c = super(GangaObject,self).__deepcopy__(memo)
     for name,item in self._schema.allItems():
         if not item['copyable']:
             setattr(c,name,self._schema.getDefaultValue(name))
         if item.isA(Schema.SharedItem):
             shared_dir=getattr(c,name)
             try:
                 from Ganga.Core.GangaRepository import getRegistry
                 shareref = GPIProxyObjectFactory(getRegistry("prep").getShareRef())
                 logger.debug("Increasing shareref")
                 shareref.increase(shared_dir.name)
             except AttributeError:
                 pass
     return c
Example #12
 def difference(self, other):
     '''Returns a new data set w/ files in this that are not in other.'''
     other_files = other.getFullFileNames()
     files = set(self.getFullFileNames()).difference(other_files)
     data = BesDataset()
     data.__construct__([list(files)])
     data.depth = self.depth
     return GPIProxyObjectFactory(data)
Example #13
 def intersection(self, other):
     '''Returns a new data set w/ files common to this and other.'''
     other_files = other.getFullFileNames()
     files = set(self.getFullFileNames()).intersection(other_files)
     data = BesDataset()
     data.__construct__([list(files)])
     data.depth = self.depth
     return GPIProxyObjectFactory(data)
Example #14
File: Dirac.py, Project: wireshark10/ganga
 def getOutputDataLFNs(self):
     """Get a list of outputdata that has been uploaded by Dirac. Excludes
     the outputsandbox if it is there."""
     lfns = super(Dirac, self).getOutputDataLFNs()
     ds = LHCbDataset()
     for f in lfns:
         ds.files.append(DiracFile(lfn=f))
     return GPIProxyObjectFactory(ds)
Example #15
 def download(self, dir='.'):
     'Downloads the LFN to dir (default is current directory).'
     dir = expandfilename(dir)
     dir = os.path.abspath(dir)
     cmd = 'result = DiracCommands.getFile("%s","%s")' % (self.name, dir)
     result = get_result(cmd, 'Problem during download', 'Download error.')
     from PhysicalFile import PhysicalFile
     return GPIProxyObjectFactory(PhysicalFile(name=result['Value']))
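A short usage sketch, assuming lf is a LogicalFile proxy for an LFN that already exists on the grid (the target directory is illustrative):

    pf = lf.download(dir='/tmp')   # runs DiracCommands.getFile, returns a PhysicalFile proxy
    print(pf.name)                 # local path of the downloaded replica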
Example #16
 def symmetricDifference(self, other):
     '''Returns a new data set w/ files in either this or other but not
     both.'''
     other_files = other.getFullFileNames()
     files = set(self.getFullFileNames()).symmetric_difference(other_files)
     data = BesDataset()
     data.__construct__([list(files)])
     data.depth = self.depth
     return GPIProxyObjectFactory(data)
Example #17
 def getOutputDataLFNs(self, force_query=False):
     """Get a list of outputdata that has been uploaded by Dirac. Excludes
     the outputsandbox if it is there."""
     global dirac_ganga_server
     lfns = self._getOutputDataLFNs(dirac_ganga_server, force_query)
     ds = BesDataset()
     for f in lfns:
         ds.files.append(LogicalFile(f))
     return GPIProxyObjectFactory(ds)
Example #18
File: Dirac.py, Project: wireshark10/ganga
 def getOutputData(self, outputDir=None, names=None, force=False):
     """Retrieve data stored on SE to outputDir (default=job output workspace).
     If names=None, then all outputdata is downloaded otherwise names should
     be a list of files to download. If force is True then download performed
     even if data already exists."""
     downloaded_files = super(Dirac, self).getOutputData(outputDir, names, force)
     ds = LHCbDataset()
     for f in downloaded_files:
         ds.files.append(DiracFile(lfn=f))
     return GPIProxyObjectFactory(ds)
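A usage sketch for this backend method, assuming j is a completed Ganga job on the Dirac backend; with no arguments everything registered as output data is fetched into the job's output workspace (the filename below is illustrative):

    ds = j.backend.getOutputData()                                   # LHCbDataset proxy of DiracFile entries
    ds_one = j.backend.getOutputData(names=['ntuple.root'], force=True)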
Example #19
    def readInputData(self, optsfiles, extraopts=False):
        '''Returns a LHCbDataSet object from a list of options files. The
        optional argument extraopts will decide if the extraopts string inside
        the application is considered or not. 

        Usage examples:
        # Create an LHCbDataset object with the data found in the optionsfile
        l=DaVinci(version='v22r0p2').readInputData([\"~/cmtuser/\" \
        \"DaVinci_v22r0p2/Tutorial/Analysis/options/Bs2JpsiPhi2008.py\"]) 
        # Get the data from an options file and assign it to the jobs inputdata
        field
        j.inputdata = j.application.readInputData([\"~/cmtuser/\" \
        \"DaVinci_v22r0p2/Tutorial/Analysis/options/Bs2JpsiPhi2008.py\"])

        # Assuming you have data in your extraopts, you can use the extraopts.
        # In this case your extraopts need to be fully parseable by gaudirun.py
        # So you must make sure that you have the proper import statements.
        # e.g.
        from Gaudi.Configuration import * 
        # If you mix optionsfiles and extraopts, as usual extraopts may
        # overwrite your options
        # 
        # Use this to create a new job with data from extraopts of an old job
        j=Job(inputdata=jobs[-1].application.readInputData([],True))
        '''
        def dummyfile():
            temp_fd, temp_filename = tempfile.mkstemp(text=True, suffix='.py')
            os.write(temp_fd, "Dummy file to keep the Optionsparser happy")
            os.close(temp_fd)
            return temp_filename

        if type(optsfiles) != type([]):
            optsfiles = [optsfiles]

        # use a dummy file to keep the parser happy
        if len(optsfiles) == 0:
            optsfiles.append(dummyfile())

        if extraopts:
            extraopts = self.extraopts
        else:
            extraopts = ""

        # parser = check_inputs(optsfiles, extraopts, self.env)
        try:
            parser = PythonOptionsParser(optsfiles, extraopts,
                                         self.getenv(False))
        except Exception as err:
            msg = 'Unable to parse the job options. Please check options ' \
                  'files and extraopts.'
            logger.error("PythonOptionsParserError:\n%s" % str(err))
            raise ApplicationConfigurationError(msg)

        return GPIProxyObjectFactory(parser.get_input_data())
Example #20
File: File.py, Project: wvengen/lgipilot
 def add(self, input):
     if not isType(input, list):
         input = [input] 
     for item in input:
         if isType(item, str):
             if os.path.isfile(expandfilename(item)):
                 logger.info('Copying file %s to shared directory %s'%(item, self.name))
                 shutil.copy2(expandfilename(item), os.path.join(shared_path,self.name))
                 shareref = GPIProxyObjectFactory(getRegistry("prep").getShareRef())
                 shareref.increase(self.name)
                 shareref.decrease(self.name)
             else:
                 logger.error('File %s not found' % expandfilename(item))
         elif isType(item, File) and item.name != '' and os.path.isfile(expandfilename(item.name)):
             logger.info('Copying file object %s to shared directory %s'%(item.name,self.name))
             shutil.copy2(expandfilename(item.name), os.path.join(shared_path,self.name))
             shareref = GPIProxyObjectFactory(getRegistry("prep").getShareRef())
             shareref.increase(self.name)
             shareref.decrease(self.name)
         else:
             logger.error('File %s not found' % expandfilename(item.name))
Example #21
 def upload(self, lfn, diracSE, guid=None):
     'Upload PFN to LFC on SE "diracSE" w/ LFN "lfn".'
     from LogicalFile import get_result
     if guid is None:
         cmd = 'result = DiracCommands.addFile("%s","%s","%s",None)' % \
               (lfn,self.name,diracSE)
     else:
         cmd = 'result = DiracCommands.addFile("%s","%s","%s","%s")' % \
               (lfn,self.name,diracSE,guid)
     result = get_result(cmd, 'Problem w/ upload', 'Error uploading file.')
     from LogicalFile import LogicalFile
     return GPIProxyObjectFactory(LogicalFile(name=lfn))
Example #22
File: Objects.py, Project: milliams/ganga
    def __deepcopy__(self, memo=None):
        self = stripProxy(self)
        self._getReadAccess()
        c = super(GangaObject, self).__deepcopy__(memo)
        if self._schema is not None:
            for name, item in self._schema.allItems():
                if not item['copyable']:
                    setattr(c, name, self._schema.getDefaultValue(name))

                if item.isA(Schema.SharedItem):

                    shared_dir = getattr(c, name)

                    if hasattr(shared_dir, 'name'):

                        from Ganga.Core.GangaRepository import getRegistry
                        from Ganga.GPIDev.Base.Proxy import GPIProxyObjectFactory
                        shareref = GPIProxyObjectFactory(getRegistry("prep").getShareRef())

                        logger.debug("Increasing shareref")
                        shareref.increase(shared_dir.name)
        return c
Example #23
    def processWildcardMatches(self):
        if self.subfiles:
            return self.subfiles

        if regex.search(self.namePattern):
            ls_cmd = getConfig('Output')['MassStorageFile']['uploadOptions']['ls_cmd']
            exitcode, output, m = self.shell.cmd1(ls_cmd + ' ' + self.inputremotedirectory, capture_stderr=True)

            for filename in output.split('\n'):
                if fnmatch(filename, self.namePattern):
                    subfile = MassStorageFile(namePattern=filename)
                    subfile.inputremotedirectory = self.inputremotedirectory

                    self.subfiles.append(GPIProxyObjectFactory(subfile))
Example #24
 def getDataset(self):
     '''Gets the dataset from the bookkeeping for current path, etc.'''
     files = []
     if not self.path: return None
     if not self.type in ['Path','RunsByDate','Run','Production']:
         raise GangaException('Type="%s" is not valid.' % self.type)
     lfnf = open('/afs/ihep.ac.cn/users/z/zhangxm/ganga/Ganga/install/5.6.2/python/GangaBoss/Lib/Dataset/lfns','r')
     for line in lfnf.readlines(): 
         newline = line.strip()
         files.append(newline)
     ds = BesDataset()
     for f in files: ds.files.append(LogicalFile(f))
     
     return GPIProxyObjectFactory(ds)
Example #25
    def getDataset(self):
        '''Gets the dataset from the bookkeeping for current dict.'''
        if not self.name: return None
        badger = Badger()
        files = []
        files = badger.getFilesByDatasetName(self.name)

        ds = BesDataset()
        for f in files:
            logicalFile = "LFN:" + f
            logger.debug("zhangxm log: data files LFN: %s", f)
            ds.files.append(logicalFile)

        return GPIProxyObjectFactory(ds)
Example #26
    def processOutputWildcardMatches(self):
        """This collects the subfiles for wildcarded output LocalFile"""
        import glob

        fileName = self.namePattern

        if self.compressed:
            fileName = '%s.gz' % self.namePattern

        sourceDir = self.getJobObject().outputdir
        if regex.search(fileName) is not None:

            for currentFile in glob.glob(os.path.join(sourceDir, fileName)):

                d = LocalFile(namePattern=os.path.basename(currentFile))
                d.compressed = self.compressed

                self.subfiles.append(GPIProxyObjectFactory(d))
Example #27
    def processWildcardMatches(self):
        if self.subfiles:
            return self.subfiles

        from fnmatch import fnmatch

        if regex.search(self.namePattern):
            #TODO namePattern shouldn't contain slashes and se_rpath should not contain wildcards
            cmd = 'lcg-ls lfn:/grid/{vo}/{se_rpath}'.format(vo=self.credential_requirements.vo, se_rpath=self.se_rpath)
            exitcode,output,m = getShell(self.credential_requirements).cmd1(cmd, capture_stderr=True)

            for filename in output.split('\n'):
                if fnmatch(filename, self.namePattern):
                    subfile = LCGSEFile(namePattern=filename)
                    subfile.se_rpath = self.se_rpath
                    subfile.lfc_host = self.lfc_host

                    self.subfiles.append(GPIProxyObjectFactory(subfile))
Example #28
        def lcgse_line_processor(line, lcgse_file):
            guid = line[line.find('->') + 2:]
            pattern = line.split(' ')[1]
            name = line.split(' ')[2].strip('.gz')

            if regex.search(lcgse_file.namePattern) is not None:
                d = LCGSEFile(namePattern=name)
                d.compressed = lcgse_file.compressed
                d.lfc_host = lcgse_file.lfc_host
                d.se = lcgse_file.se
                # todo copy also the other attributes
                lcgse_file.subfiles.append(GPIProxyObjectFactory(d))
                lcgse_line_processor(line, d)
            elif pattern == lcgse_file.namePattern:
                if guid.startswith('ERROR'):
                    logger.error("Failed to upload file to LCG SE")
                    logger.error(guid[6:])
                    lcgse_file.failureReason = guid[6:]
                    return
                lcgse_file.locations = guid
Example #29
    def processWildcardMatches(self):

        if self.subfiles:
            return self.subfiles

        import glob

        fileName = self.namePattern

        if self.compressed:
            fileName = '%s.gz' % self.namePattern

        sourceDir = self.localDir

        if regex.search(fileName) is not None:
            for currentFile in glob.glob(os.path.join(sourceDir, fileName)):
                d = LocalFile(namePattern=os.path.basename(currentFile),
                              localDir=os.path.dirname(currentFile))
                d.compressed = self.compressed

                self.subfiles.append(GPIProxyObjectFactory(d))
Example #30
    def processWildcardMatches(self):
        if self.subfiles:
            return self.subfiles

        from fnmatch import fnmatch

        if regex.search(self.namePattern):
            # TODO namePattern shouldn't contain slashes and se_rpath should
            # not contain wildcards
            exitcode, output, m = self.shell.cmd1(
                'lcg-ls lfn:/grid/' + getConfig('LCG')['VirtualOrganisation'] +
                '/' + self.se_rpath,
                capture_stderr=True)

            for filename in output.split('\n'):
                if fnmatch(filename, self.namePattern):
                    subfile = LCGSEFile(namePattern=filename)
                    subfile.se_rpath = self.se_rpath
                    subfile.lfc_host = self.lfc_host

                    self.subfiles.append(GPIProxyObjectFactory(subfile))
Example #31
    def getOutputData(self, dir=None, names=None):
        """Retrieve data stored on SE to dir (default=job output workspace).
        If names=None, then all outputdata is downloaded otherwise names should
        be a list of files to download."""
        j = self.getJobObject()
        if not names: names = ''
        if not dir: dir = j.getOutputWorkspace().getPath()

        global dirac_ganga_server
        cmd = 'result = DiracCommands.getOutputData("%s","%s",%d)' \
              % (names,dir,self.id)
        result = dirac_ganga_server.execute(cmd)

        downloaded_files = []
        if not result_ok(result):
            logger.error('Output download failed: %s' % str(result))
        else:
            downloaded_files = result.get('Value', [])
        ds = BesDataset()
        for f in downloaded_files:
            ds.files.append(LogicalFile(f))
        return GPIProxyObjectFactory(ds)
Example #32
 def listShareDirContents(self,shared_directory_name):
     shareref = GPIProxyObjectFactory(getRegistry("prep").getShareRef())
     shareref.ls(shared_directory_name)
Example #33
    def put(self):
        """
        Postprocesses (upload) output file to the desired destination from the client
        """
        import hashlib
        from apiclient.http import MediaFileUpload

        service = self._setup_service()

        # Sets the target directory
        dir_path = self.localDir
        if self.localDir == '':
            dir_path = os.getcwd()

        if self._getParent() is not None:
            dir_path = self.getJobObject().getOutputWorkspace().getPath()

        # Wildcard procedure
        if regex.search(self.namePattern) is not None:
            for wildfile in glob.glob(os.path.join(dir_path,
                                                   self.namePattern)):
                FILENAME = wildfile
                filename = os.path.basename(wildfile)

                # Upload procedure
                media_body = MediaFileUpload(FILENAME,
                                             mimetype='text/plain',
                                             resumable=True)
                body = {
                    'title':
                    '%s' % filename,
                    'description':
                    'A test document',
                    'mimeType':
                    'text/plain',
                    'parents': [{
                        "kind": "drive#fileLink",
                        "id": "%s" % self.GangaFolderId
                    }]
                }

                # Metadata file and md5checksum integrity check
                file = service.files().insert(body=body,
                                              media_body=media_body).execute()
                with open(FILENAME, 'rb') as thefile:
                    if file.get('md5Checksum') == hashlib.md5(
                            thefile.read()).hexdigest():
                        logger.info("File \'%s\' uploaded successfully" %
                                    filename)
                    else:
                        logger.error("File \'%s\' uploaded unsuccessfully" %
                                     filename)

                # Assign new schema components to each file and append to job
                # subfiles
                g = GoogleFile(filename)
                g.downloadURL = file.get('downloadUrl', '')
                g.id = file.get('id', '')
                g.title = file.get('title', '')
                self.subfiles.append(GPIProxyObjectFactory(g))

        # For non-wildcard upload
        else:
            # Path to the file to upload
            FILENAME = os.path.join(dir_path, self.namePattern)

            # Upload procedure, can edit more of file metadata
            media_body = MediaFileUpload(FILENAME,
                                         mimetype='text/plain',
                                         resumable=True)
            body = {
                'title':
                '%s' % self.namePattern,
                'description':
                'A test document',
                'mimeType':
                'text/plain',
                'parents': [{
                    "kind": "drive#fileLink",
                    "id": "%s" % self.GangaFolderId
                }]
            }

            # Metadata storage and md5checksum integrity check
            file = service.files().insert(body=body,
                                          media_body=media_body).execute()

            with open(FILENAME, 'rb') as thefile:
                if file.get('md5Checksum') == hashlib.md5(
                        thefile.read()).hexdigest():
                    logger.info("File \'%s\' uploaded succesfully" %
                                self.namePattern)
                else:
                    logger.error("Upload Unsuccessful")

            # Assign values to new schema components
            self.downloadURL = file.get('downloadUrl', '')
            self.id = file.get('id', '')
            self.title = file.get('title', '')

            return
        return GPIProxyObjectFactory(self.subfiles[:])
Example #34
 def decrementShareCounter(self, shared_directory_name, remove=0):
     remove=remove
     logger.debug('Decrementing shared directory reference counter')
     shareref = GPIProxyObjectFactory(getRegistry("prep").getShareRef())
     shareref.decrease(shared_directory_name, remove)
Example #35
 def incrementShareCounter(self, shared_directory_name):
     logger.debug('Incrementing shared directory reference counter')
     shareref = GPIProxyObjectFactory(getRegistry("prep").getShareRef())
     logger.debug('within incrementShareCounter, calling increase')
     shareref.increase(shared_directory_name)
Example #36
                        'Exceptions')
exportToPublicInterface('ProtectedAttributeError', ProtectedAttributeError,
                        'Exceptions')
exportToPublicInterface('ReadOnlyObjectError', ReadOnlyObjectError,
                        'Exceptions')
exportToPublicInterface('JobError', JobError, 'Exceptions')

# ------------------------------------------------------------------------------------
# Import Monitoring Services
import Ganga.GPIDev.MonitoringServices

# ------------------------------------------------------------------------------------
# only the available credentials are exported
credential = getCredential(name='GridProxy', create=False)
if credential:
    exportToPublicInterface('gridProxy', GPIProxyObjectFactory(credential),
                            'Objects', 'Grid proxy management object.')

credential = getCredential('AfsToken')
if credential:
    exportToPublicInterface('afsToken', GPIProxyObjectFactory(credential),
                            'Objects', 'AFS token management object.')

# ------------------------------------------------------------------------------------
# Add Misc functions to public interface
exportToPublicInterface('license', ganga_license, 'Functions')
exportToPublicInterface('load', load, 'Functions')
exportToPublicInterface('export', export, 'Functions')
exportToPublicInterface('typename', typename, 'Functions')
exportToPublicInterface('categoryname', categoryname, 'Functions')
exportToPublicInterface('plugins', plugins, 'Functions')
Example #37
 def __getitem__(self, i):
     '''Provides scripting (e.g. od[2] returns the 3rd file name) '''
     if isinstance(i, slice):
         return GPIProxyObjectFactory(OutputData(files=self.files[i]))
     else:
         return self.files[i]
Example #38
    def put(self):
        """
        Creates and executes commands for file upload to mass storage (Castor), this method will
        be called on the client
        """
        import glob
        import re

        sourceDir = ''

        # if used as a stand alone object
        if self._getParent() is None:
            if self.localDir == '':
                import os
                _CWD = os.getcwd()
                if os.path.isfile(os.path.join(_CWD, self.namePattern)):
                    sourceDir = _CWD
                else:
                    logger.warning(
                        'localDir attribute is empty, don\'t know from which dir to take the file'
                    )
                    return
            else:
                sourceDir = self.localDir

                (result, message) = self.validate()

                if result == False:
                    logger.warning(message)
                    return

        else:
            job = self.getJobObject()
            sourceDir = job.outputdir

            # if there are subjobs, the put method will be called on every subjob
            # and will upload the resulted output file
            if len(job.subjobs) > 0:
                return

        massStorageConfig = getConfig(
            'Output')['MassStorageFile']['uploadOptions']

        mkdir_cmd = massStorageConfig['mkdir_cmd']
        cp_cmd = massStorageConfig['cp_cmd']
        ls_cmd = massStorageConfig['ls_cmd']
        massStoragePath = massStorageConfig['path']

        # create the last directory (if not exist) from the config path
        import os.path
        pathToDirName = os.path.dirname(massStoragePath)
        dirName = os.path.basename(massStoragePath)

        (exitcode, mystdout, mystderr) = self.execSyscmdSubprocess(
            '%s %s' % (ls_cmd, pathToDirName))
        if exitcode != 0:
            self.handleUploadFailure(mystderr)
            return

        directoryExists = False
        for directory in mystdout.split('\n'):
            if directory.strip() == dirName:
                directoryExists = True
                break

        if not directoryExists:
            (exitcode, mystdout, mystderr) = self.execSyscmdSubprocess(
                '%s %s' % (mkdir_cmd, massStoragePath))
            if exitcode != 0:
                self.handleUploadFailure(mystderr)
                return

        # the folder part of self.outputfilenameformat
        folderStructure = ''
        # the file name part of self.outputfilenameformat
        filenameStructure = ''

        if self._getParent() != None:
            jobfqid = self.getJobObject().fqid

            jobid = jobfqid
            subjobid = ''

            if (jobfqid.find('.') > -1):
                jobid = jobfqid.split('.')[0]
                subjobid = jobfqid.split('.')[1]

            if self.outputfilenameformat is None:
                filenameStructure = '{fname}'
                # create jid/sjid directories
                folderStructure = jobid
                if subjobid != '':
                    folderStructure = os.path.join(jobid, subjobid)

            else:
                filenameStructure = os.path.basename(self.outputfilenameformat)
                filenameStructure = filenameStructure.replace('{jid}', jobid)

                folderStructure = os.path.dirname(self.outputfilenameformat)
                folderStructure = folderStructure.replace('{jid}', jobid)

                if subjobid != '':
                    filenameStructure = filenameStructure.replace(
                        '{sjid}', subjobid)
                    folderStructure = folderStructure.replace(
                        '{sjid}', subjobid)
        else:
            if self.outputfilenameformat != None:
                folderStructure = os.path.dirname(self.outputfilenameformat)
                filenameStructure = os.path.basename(self.outputfilenameformat)
            else:
                filenameStructure = '{fname}'

        # create the folder structure
        if folderStructure != '':

            folderStructure = folderStructure.strip('/')
            massStoragePath = os.path.join(massStoragePath, folderStructure)
            command = '%s -p %s' % (mkdir_cmd, massStoragePath)
            (exitcode, mystdout, mystderr) = self.execSyscmdSubprocess(command)
            if exitcode != 0:
                self.handleUploadFailure(mystderr)
                return

        # here filenameStructure has replaced jid and sjid if any, and only not
        # replaced keyword is fname
        fileName = self.namePattern
        if self.compressed:
            fileName = '%s.gz' % self.namePattern

        if regex.search(fileName) is not None:
            for currentFile in glob.glob(os.path.join(sourceDir, fileName)):
                finalFilename = filenameStructure.replace(
                    '{fname}', os.path.basename(currentFile))
                (exitcode, mystdout, mystderr) = self.execSyscmdSubprocess(
                    '%s %s %s' %
                    (cp_cmd, currentFile,
                     os.path.join(massStoragePath, finalFilename)))

                d = MassStorageFile(namePattern=os.path.basename(currentFile))
                d.compressed = self.compressed

                if exitcode != 0:
                    self.handleUploadFailure(mystderr)
                else:
                    logger.info(
                        '%s successfully uploaded to mass storage as %s' %
                        (currentFile,
                         os.path.join(massStoragePath, finalFilename)))
                    d.locations = os.path.join(massStoragePath,
                                               os.path.basename(finalFilename))

                    # Alex removed this as more general approach in job.py after put() is called
                    # remove file from output dir if this object is attached to a job
                    # if self._getParent() != None:
                    #    os.system('rm %s' % os.path.join(sourceDir, currentFile))

                self.subfiles.append(GPIProxyObjectFactory(d))
        else:
            currentFile = os.path.join(sourceDir, fileName)
            finalFilename = filenameStructure.replace(
                '{fname}', os.path.basename(currentFile))
            (exitcode, mystdout, mystderr) = self.execSyscmdSubprocess(
                '%s %s %s' % (cp_cmd, currentFile,
                              os.path.join(massStoragePath, finalFilename)))
            if exitcode != 0:
                self.handleUploadFailure(mystderr)
            else:
                logger.info('%s successfully uploaded to mass storage as %s' %
                            (currentFile,
                             os.path.join(massStoragePath, finalFilename)))
                location = os.path.join(massStoragePath,
                                        os.path.basename(finalFilename))
                if location not in self.locations:
                    self.locations.append(location)