Code example #1
File: TestGangaList.py  Project: slangrock/ganga
    def setUp(self):
        self.ganga_list = GangaList()

        self.plain1 = [self._makeRandomTFile() for _ in range(15)]
        self.plain2 = [self._makeRandomTFile() for _ in range(10)]

        self.proxied1 = GangaList()
        self.proxied1.extend(self.plain1[:])
        self.proxied2 = GangaList()
        self.proxied2.extend(self.plain2[:])

        assert len(getProxyAttr(self.proxied1, '_list')) == len(
            self.plain1), "Something's wrong with construction 1"
        assert len(getProxyAttr(self.proxied2, '_list')) == len(
            self.plain2), "Something's wrong with construction 2"
Code example #2
def expandWildCards(filelist):
    """

    """
    l = GangaList()
    l.extend(iexpandWildCards(filelist))
    return l
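A minimal usage sketch for the wrapper above (not part of the original snippet): the wildcard patterns are illustrative, and it assumes iexpandWildCards yields one file object per matching path.

# Hypothetical usage; the patterns below are illustrative only, and it is
# assumed that iexpandWildCards yields one file object per matching path.
matched = expandWildCards(['/data/run1/*.root', '/data/run2/events_?.dst'])
print(len(matched))    # number of matched files
print(type(matched))   # a GangaList, so it can be assigned to schema list attributes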
Code example #3
File: LHCbDataset.py  Project: will-cern/ganga
    def __init__(self, files=None, persistency=None, depth=0, fromRef=False):
        super(LHCbDataset, self).__init__()
        if files is None:
            files = []
        self.files = GangaList()
        process_files = True
        if fromRef:
            self.files._list.extend(files)
            process_files = False
        elif isinstance(files, GangaList):

            def isFileTest(_file):
                return isinstance(_file, IGangaFile)

            areFiles = all([isFileTest(f) for f in files._list])
            if areFiles:
                self.files._list.extend(files._list)
                process_files = False
        elif isinstance(files, LHCbDataset):
            self.files._list.extend(files.files._list)
            process_files = False

        if process_files:
            if isType(files, LHCbDataset):
                for this_file in files:
                    self.files.append(deepcopy(this_file))
            elif isType(files, IGangaFile):
                self.files.append(deepcopy(files))
            elif isType(files, (list, tuple, GangaList)):
                new_list = []
                for this_file in files:
                    if type(this_file) is str:
                        new_file = string_datafile_shortcut_lhcb(
                            this_file, None)
                    elif isType(this_file, IGangaFile):
                        new_file = stripProxy(this_file)
                    else:
                        new_file = strToDataFile(this_file)
                    new_list.append(new_file)
                self.files.extend(new_list)
            elif type(files) is str:
                self.files.append(string_datafile_shortcut_lhcb(files, None),
                                  False)
            else:
                raise GangaException(
                    "Unknown object passed to LHCbDataset constructor!")

        self.files._setParent(self)

        logger.debug("Processed inputs, assigning files")

        # Feel free to turn this on again for debugging but it's potentially quite expensive
        #logger.debug( "Creating dataset with:\n%s" % self.files )

        logger.debug("Assigned files")

        self.persistency = persistency
        self.depth = depth
        logger.debug("Dataset Created")
Code example #4
    def testPrintingGPIObjectList(self):

        g = GangaList()
        for _ in range(10):
            g.append(self._makeRandomTFile())

        g_string = str(g)
        assert eval(g_string) == g, 'String should correctly eval'
Code example #5
    def cloneVal(v):
        GangaList = _getGangaList()
        if isinstance(v, (list, tuple, GangaList)):
            new_v = GangaList()
            for elem in v:
                new_v.append(self.__cloneVal(elem, obj))
            return new_v
        else:
            return self.__cloneVal(v, obj)
Code example #6
File: TestGangaList.py  Project: wireshark10/ganga
    def __init__(self, *args, **kwargs):
        super(TestGangaList, self).__init__(*args, **kwargs)

        self.plain1 = []
        self.plain2 = []

        self.proxied1 = []
        self.proxied2 = []

        self.ganga_list = GangaList()
Code example #7
    def setUp(self):
        super(TestGangaList, self).setUp()

        self.plain1 = [self._makeRandomTFile() for _ in range(15)]
        self.plain2 = [self._makeRandomTFile() for _ in range(10)]

        self.proxied1 = GangaList()
        self.proxied1.extend(self.plain1[:])
        self.proxied2 = GangaList()
        self.proxied2.extend(self.plain2[:])

        t = TFile()
        real_t = stripProxy(t)
        new_proxy_t = addProxy(real_t)
        #hopefully_t = stripProxy(new_proxy_t)
        #assert real_t is hopefully_t
        assert t is new_proxy_t

        self.assertEqual(len(getProxyAttr(self.proxied1, '_list')), len(self.plain1), "Something's wrong with construction")
        self.assertEqual(len(getProxyAttr(self.proxied2, '_list')), len(self.plain2), "Something's wrong with construction")
Code example #8
    def testFullPrintingGPIObjectList(self):

        g = GangaList()
        for _ in range(10):
            g.append(self._makeRandomTFile())
        g_string = str(g)

        import StringIO
        sio = StringIO.StringIO()
        full_print(g, sio)
        assert g_string == str(
            sio.getvalue()).rstrip(), 'Orphaned lists should full_print'
Code example #9
    def testPrintingPlainList(self):

        g = GangaList()
        l = []
        print('"' + str(g) + '"')
        print('"' + str(l) + '"')
        print(l == g)
        assert str(l) == str(g), 'Empty lists should print the same'

        for i in xrange(100):
            g.append(i)
            l.append(i)
        assert str(l) == str(g), 'Normal Python objects should print the same'
Code example #10
File: BOOT.py  Project: will-cern/ganga
def getDiracFiles():
    import os
    from GangaDirac.Lib.Files.DiracFile import DiracFile
    from Ganga.GPIDev.Lib.GangaList.GangaList import GangaList
    filename = DiracFile.diracLFNBase().replace('/', '-') + '.lfns'
    logger.info(
        'Creating list, this can take a while if you have a large number of SE files, please wait...'
    )
    execute('dirac-dms-user-lfns &> /dev/null', shell=True, timeout=None)
    g = GangaList()
    with open(filename[1:], 'r') as lfnlist:
        lfnlist.seek(0)
        g.extend(
            (DiracFile(lfn='%s' % lfn.strip()) for lfn in lfnlist.readlines()))
    return addProxy(g)
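A hedged usage sketch (not from the snippet above): assuming getDiracFiles is exposed at the GPI prompt as BOOT.py suggests, the returned proxied GangaList behaves like a list of DiracFile objects; the 'ntuple' filter string is purely illustrative.

# Hypothetical interactive usage; 'ntuple' is an illustrative filter string.
my_files = getDiracFiles()
ntuples = [f for f in my_files if 'ntuple' in f.lfn]
print('%d of %d LFNs look like ntuples' % (len(ntuples), len(my_files)))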
Code example #11
File: Proxy.py  Project: slangrock/ganga
def _stripAttribute(obj, v, name):
    # just warn
    # print '**** checking',v,v.__class__,
    # isinstance(val,GPIProxyObject)
    if isinstance(v, list):
        from Ganga.GPI import GangaList
        v_new = GangaList()
        for elem in v:
            v_new.append(elem)
        v = v_new
    if isinstance(v, GPIProxyObject) or hasattr(v, implRef):
        v = stripProxy(v)
        logger.debug('%s property: assigned a component object (%s used)' %
                     (name, implRef))
    return stripProxy(obj)._attribute_filter__set__(name, v)
Code example #12
File: TestCopy.py  Project: slangrock/ganga
    def testCopy(self):

        gl = GangaList()

        numberOfFiles = 100

        for _ in range(numberOfFiles):
            # add something which is generally not allowed by GangaList
            gl.append([self._makeRandomTFile()])

        assert len(gl) == numberOfFiles, 'Right number of files must be made'

        gl2 = copy.copy(gl)
        assert len(gl2) == len(gl), 'lists must be equal'
        assert gl2 is not gl, 'lists must be copies'
Code example #13
    def __cloneVal(self, v, obj):

        item = obj._schema[getName(self)]

        if v is None:
            if item.hasProperty('category'):
                assertion = item['optional'] and (item['category'] != 'internal')
            else:
                assertion = item['optional']
            #assert(assertion)
            if assertion is False:
                logger.warning("Item: '%s'. of class type: '%s'. Has a Default value of 'None' but is NOT optional!!!" % (getName(self), type(obj)))
                logger.warning("Please contact the developers and make sure this is updated!")
            return None
        elif isinstance(v, str):
            return str(v)
        elif isinstance(v, int):
            return int(v)
        elif isinstance(v, dict):
            # Clone each value so the returned dict shares no state with v
            new_dict = {}
            for key, elem in v.iteritems():
                new_dict[key] = self.__cloneVal(elem, obj)
            return new_dict
        else:
            if not isinstance(v, Node) and isinstance(v, (list, tuple)):
                try:
                    GangaList = _getGangaList()
                    new_v = GangaList()
                except ImportError:
                    new_v = []
                for elem in v:
                    new_v.append(self.__cloneVal(elem, obj))
                #return new_v
            elif not isinstance(v, Node):
                if inspect.isclass(v):
                    new_v = v()
                else:
                    new_v = v
                if not isinstance(new_v, Node):
                    logger.error("v: %s" % str(v))
                    raise GangaException("Error: found Object: %s of type: %s expected an object inheriting from Node!" % (str(v), str(type(v))))
                else:
                    new_v = self.__copyNodeObject(new_v, obj)
            else:
                new_v = self.__copyNodeObject(v, obj)

            return new_v
Code example #14
    def testDeepCopy(self):

        from Ganga.GPIDev.Lib.GangaList.GangaList import GangaList
        gl = GangaList()

        numberOfFiles = 100

        for _ in range(numberOfFiles):
            # add something which is generally not allowed by GangaList
            gl.append([self._makeRandomTFile()])

        assert len(gl) == numberOfFiles, 'Right number of files must be made'

        gl2 = copy.deepcopy(gl)
        assert len(gl2) == len(gl), 'lists must be equal'
        assert gl2 is not gl, 'lists must be copies'
        assert gl[0] is not gl2[0], 'the references must not be copied'
Code example #15
File: LHCbDataset.py  Project: slangrock/ganga
    def __init__(self, files=None, persistency=None, depth=0):
        super(LHCbDataset, self).__init__()
        if files is None:
            files = []
        new_files = GangaList()
        if isType(files, LHCbDataset):
            for this_file in files:
                new_files.append(deepcopy(this_file))
        elif isType(files, IGangaFile):
            new_files.append(deepcopy(files))
        elif isType(files, (list, tuple, GangaList)):
            new_list = []
            for this_file in files:
                if type(this_file) is str:
                    new_file = string_datafile_shortcut_lhcb(this_file, None)
                elif isType(this_file, IGangaFile):
                    new_file = this_file
                else:
                    new_file = strToDataFile(this_file)
                new_list.append(stripProxy(new_file))
            stripProxy(new_files)._list = new_list
        elif type(files) is str:
            new_files.append(string_datafile_shortcut_lhcb(files, None), False)
        else:
            raise GangaException("Unknown object passed to LHCbDataset constructor!")
        new_files._setParent(self)

        logger.debug("Processed inputs, assigning files")

        # Feel free to turn this on again for debugging but it's potentially quite expensive
        #logger.debug( "Creating dataset with:\n%s" % files )
        self.files = new_files
        
        logger.debug("Assigned files")

        self.persistency = persistency
        self.depth = depth
        logger.debug("Dataset Created")
Code example #16
    def put(self, lfn='', force=False, uploadSE="", replicate=False):
        """
        Try to upload file sequentially to storage elements defined in configDirac['allDiracSE'].
        File will be uploaded to the first SE that the upload command succeeds for.

        The file is uploaded to the SE described by the DiracFile.defaultSE attribute

        Alternatively, the user can specify an uploadSE which contains an SE
        which the file is to be uploaded to.

        If the user wants to replicate this file(s) across all SE then they should state replicate = True.

        Return value will be either the stdout from the dirac upload command if not
        using the wildcard characters '*?[]' in the namePattern.
        If the wildcard characters are used then the return value will be a list containing
        newly created DiracFile objects which were the result of glob-ing the wildcards.

        The objects in this list will have been uploaded or had their failureReason attribute populated if the
        upload failed.
        """

        if self.lfn != "" and force == False and lfn == '':
            logger.warning(
                "Warning you're about to 'put' this DiracFile: %s on the grid as it already has an lfn: %s"
                % (self.namePattern, self.lfn))
            decision = raw_input('y / [n]:')
            while not (decision.lower() in ['y', 'n']
                       or decision.lower() == ''):
                decision = raw_input('y / [n]:')

            if decision.lower() == 'y':
                pass
            else:
                return

        if (lfn != '' and self.lfn != '') and force == False:
            logger.warning(
                "Warning you're attempting to put this DiracFile: %s" %
                self.namePattern)
            logger.warning("It currently has an LFN associated with it: %s" %
                           self.lfn)
            logger.warning(
                "Do you want to continue and attempt to upload to: %s" % lfn)
            decision = raw_input('y / [n]:')
            while not (decision.lower() in ['y', 'n', '']):
                decision = raw_input('y / [n]:')

            if decision.lower() == 'y':
                pass
            else:
                return

        if lfn and os.path.basename(lfn) != self.namePattern:
            logger.warning(
                "Changing namePattern from: '%s' to '%s' during put operation"
                % (self.namePattern, os.path.basename(lfn)))

        if lfn:
            self.lfn = lfn

        # It looks like this is only needed for the interactive uploading of jobs.
        # Also, if any backend needs a Dirac upload on the client, then when downloaded
        # this will upload and then delete the file.

        if self.namePattern == "":
            if self.lfn != '':
                logger.warning(
                    "'Put'-ing a file with ONLY an existing LFN makes no sense!"
                )
            raise GangaFileError(
                'Can\'t upload a file without a local file name.')

        sourceDir = self.localDir
        if self.localDir is None:
            sourceDir = os.getcwd()
            # attached to a job, use the joboutputdir
            if self._parent != None and os.path.isdir(
                    self.getJobObject().outputdir):
                sourceDir = self.getJobObject().outputdir

        if not os.path.isdir(sourceDir):
            raise GangaFileError(
                'localDir attribute is not a valid dir, don\'t know from which dir to take the file'
            )

        if regex.search(self.namePattern) is not None:
            if self.lfn != "":
                logger.warning(
                    "Cannot specify a single lfn for a wildcard namePattern")
                logger.warning("LFN will be generated automatically")
                self.lfn = ""

        if not self.remoteDir:
            try:
                job = self.getJobObject()
                lfn_folder = os.path.join("GangaJob_%s" % job.getFQID('/'),
                                          "OutputFiles")
            except AssertionError:
                t = datetime.datetime.now()
                this_date = t.strftime("%H.%M_%A_%d_%B_%Y")
                lfn_folder = os.path.join('GangaFiles_%s' % this_date)
            lfn_base = os.path.join(
                DiracFile.diracLFNBase(self.credential_requirements),
                lfn_folder)

        else:
            lfn_base = os.path.join(
                DiracFile.diracLFNBase(self.credential_requirements),
                self.remoteDir)

        if uploadSE == "":
            if self.defaultSE != "":
                storage_elements = [self.defaultSE]
            else:
                if configDirac['allDiracSE']:
                    storage_elements = [
                        random.choice(configDirac['allDiracSE'])
                    ]
                else:
                    raise GangaFileError(
                        "Can't upload a file without a valid defaultSE or storageSE, please provide one"
                    )
        elif isinstance(uploadSE, list):
            storage_elements = uploadSE
        else:
            storage_elements = [uploadSE]

        outputFiles = GangaList()
        for this_file in glob.glob(os.path.join(sourceDir, self.namePattern)):
            name = this_file

            if not os.path.exists(name):
                if not self.compressed:
                    raise GangaFileError(
                        'Cannot upload file. File "%s" must exist!' % name)
                name += '.gz'
                if not os.path.exists(name):
                    raise GangaFileError('File "%s" must exist!' % name)
            else:
                if self.compressed:
                    os.system('gzip -c %s > %s.gz' % (name, name))
                    name += '.gz'
                    if not os.path.exists(name):
                        raise GangaFileError('File "%s" must exist!' % name)

            lfn = os.path.join(lfn_base, os.path.basename(this_file))

            d = DiracFile()
            d.namePattern = os.path.basename(name)
            d.compressed = self.compressed
            d.localDir = sourceDir
            stderr = ''
            stdout = ''
            logger.info('Uploading file \'%s\' to \'%s\' as \'%s\'' %
                        (name, storage_elements[0], lfn))
            logger.debug('execute: uploadFile("%s", "%s", %s)' %
                         (lfn, os.path.join(sourceDir,
                                            name), str([storage_elements[0]])))
            try:
                stdout = execute('uploadFile("%s", "%s", %s)' %
                                 (lfn, os.path.join(sourceDir, name),
                                  str([storage_elements[0]])),
                                 cred_req=self.credential_requirements)
            except GangaDiracError as err:
                logger.warning("Couldn't upload file '%s': \'%s\'" %
                               (os.path.basename(name), err))
                failureReason = "Error in uploading file '%s' : '%s'" % (
                    os.path.basename(name), err)
                if regex.search(self.namePattern) is not None:
                    d.failureReason = failureReason
                    outputFiles.append(d)
                    continue
                self.failureReason += '\n' + failureReason
                continue

            stdout_temp = stdout.get('Successful')

            if not stdout_temp:
                msg = "Couldn't upload file '%s': \'%s\'" % (
                    os.path.basename(name), stdout)
                logger.warning(msg)
                if regex.search(self.namePattern) is not None:
                    d.failureReason = msg
                    outputFiles.append(d)
                    continue
                self.failureReason = msg
                continue
            else:
                lfn_out = stdout_temp[lfn]

            # when doing the two step upload delete the temp file
            if self.compressed or self._parent != None:
                os.remove(name)
            # need another eval as datetime needs to be included.
            guid = lfn_out.get('GUID', '')
            if regex.search(self.namePattern) is not None:
                d.lfn = lfn
                d.remoteDir = os.path.dirname(lfn)
                d.locations = lfn_out.get('allDiracSE', '')
                d.guid = guid
                outputFiles.append(d)
                continue
            else:
                self.lfn = lfn
                self.remoteDir = os.path.dirname(lfn)
                self.locations = lfn_out.get('allDiracSE', '')
                self.guid = guid

        if replicate == True:

            if len(outputFiles) == 1 or len(outputFiles) == 0:
                storage_elements.pop(0)
                for se in storage_elements:
                    self.replicate(se)
            else:
                storage_elements.pop(0)
                for this_file in outputFiles:
                    for se in storage_elements:
                        this_file.replicate(se)

        if len(outputFiles) > 0:
            return outputFiles
        else:
            outputFiles.append(self)
            return outputFiles
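A short usage sketch of the put() API described in the docstring above; the file name, local directory and SE name are illustrative assumptions, not values taken from the example.

# Hypothetical usage following the docstring: upload a local file to one SE,
# then replicate it to the remaining configured storage elements.
f = DiracFile(namePattern='histos.root', localDir='/tmp/results')  # illustrative names
uploaded = f.put(uploadSE='CERN-USER', replicate=True)             # illustrative SE
# With a wildcard namePattern the return value is a GangaList of DiracFile
# objects; each either carries an lfn or has failureReason populated.
for df in uploaded:
    print(df.lfn or df.failureReason)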
Code example #17
File: TestGangaList.py  Project: slangrock/ganga
    def testNonZero(self):
        """@ExpectedFailure"""
        assert not GangaList(), 'An empty GangaList should be false, just like a list'
Code example #18
    def __atomic_set__(self, _obj, _val):
        ## self: attribute being changed or Ganga.GPIDev.Base.Objects.Descriptor in which case getName(self) gives the name of the attribute being changed
        ## _obj: parent class which 'owns' the attribute
        ## _val: value of the attribute which we're about to set

        #if hasattr(_obj, getName(self)):
        #    if not isinstance(getattr(_obj, getName(self)), GangaObject):
        #        if type( getattr(_obj, getName(self)) ) == type(_val):
        #            object.__setattr__(_obj, getName(self), deepcopy(_val))
        #            return
#
#        if not isinstance(_obj, GangaObject) and type(_obj) == type(_val):
#            _obj = deepcopy(_val)
#            return

        obj = _obj
        temp_val = _val

        from Ganga.GPIDev.Lib.GangaList.GangaList import makeGangaList

        if hasattr(obj, '_checkset_name'):
            checkSet = self._bind_method(obj, self._checkset_name)
            if checkSet is not None:
                checkSet(temp_val)
        if hasattr(obj, '_filter_name'):
            this_filter = self._bind_method(obj, self._filter_name)
            if this_filter:
                val = this_filter(temp_val)
            else:
                val = temp_val
        else:
            val = temp_val

        # LOCKING
        obj._getWriteAccess()

        #self._check_getter()

        item = obj._schema[getName(self)]

        def cloneVal(v):
            GangaList = _getGangaList()
            if isinstance(v, (list, tuple, GangaList)):
                new_v = GangaList()
                for elem in v:
                    new_v.append(self.__cloneVal(elem, obj))
                return new_v
            else:
                return self.__cloneVal(v, obj)

        ## If the item has been defined as a sequence great, let's continue!
        if item['sequence']:
            _preparable = True if item['preparable'] else False
            if len(val) == 0:
                GangaList = _getGangaList()
                new_val = GangaList()
            else:
                if isinstance(item, Schema.ComponentItem):
                    new_val = makeGangaList(val, cloneVal, parent=obj, preparable=_preparable)
                else:
                    new_val = makeGangaList(val, parent=obj, preparable=_preparable)
        else:
            ## Else we need to work out what we've got.
            if isinstance(item, Schema.ComponentItem):
                GangaList = _getGangaList()
                if isinstance(val, (list, tuple, GangaList)):
                    ## Can't have a GangaList inside a GangaList easily so lets not
                    if isinstance(_obj, GangaList):
                        newListObj = []
                    else:
                        newListObj = GangaList()

                    self.__createNewList(newListObj, val, cloneVal)
                    #for elem in val:
                    #    newListObj.append(cloneVal(elem))
                    new_val = newListObj
                else:
                    new_val = cloneVal(val)
            else:
                new_val = val
                pass
            #val = deepcopy(val)

        if isinstance(new_val, Node):
            new_val._setParent(obj)

        obj.setNodeAttribute(getName(self), new_val)

        obj._setDirty()
Code example #19
    def _create_subjob(self, job, dataset):
        logger.debug("_create_subjob")
        datatmp = []

        logger.debug("dataset size: %s" % str(len(dataset)))
        #logger.debug( "dataset: %s" % str(dataset) )

        from GangaLHCb.Lib.LHCbDataset.LHCbDataset import LHCbDataset

        if isinstance(dataset, LHCbDataset):
            for i in dataset:
                if isType(i, DiracFile):
                    datatmp.append(i)
                else:
                    logger.error(
                        "Unknown file-type %s, cannot perform split with file %s"
                        % (type(i), str(i)))
                    from Ganga.Core.exceptions import GangaException
                    raise GangaException(
                        "Unknown file-type %s, cannot perform split with file %s"
                        % (type(i), str(i)))
        elif type(dataset) == type([]) or isType(dataset, GangaList()):
            for this_file in dataset:
                if type(this_file) is str:
                    datatmp.append(allComponentFilters['gangafiles'](this_file,
                                                                     None))
                elif isType(this_file, IGangaFile):
                    datatmp.append(this_file)
                else:
                    logger.error("Unexpected type: %s" % str(type(this_file)))
                    logger.error("Wanted object to inherit from type: %s: %s" %
                                 (str(type(IGangaFile()))))
                    from Ganga.Core.exceptions import GangaException
                    x = GangaException("Unknown(unexpected) file object: %s" %
                                       this_file)
                    raise x
        elif type(dataset) is str:
            datatmp.append(DiracFile(lfn=dataset))
        else:
            logger.error("Unkown dataset type, cannot perform split here")
            from Ganga.Core.exceptions import GangaException
            logger.error("Dataset found: " + str(dataset))
            raise GangaException(
                "Unkown dataset type, cannot perform split here")

        logger.debug("Creating new Job in Splitter")
        j = Job()
        logger.debug("Copying From Job")
        j.copyFrom(
            stripProxy(job),
            ['splitter', 'subjobs', 'inputdata', 'inputsandbox', 'inputfiles'])
        logger.debug("Unsetting Splitter")
        j.splitter = None
        #logger.debug("Unsetting Merger")
        #j.merger = None
        #j.inputsandbox = [] ## master added automatically
        #j.inputfiles = []
        logger.debug("Setting InputData")
        j.inputdata = LHCbDataset(files=datatmp[:],
                                  persistency=self.persistency,
                                  depth=self.depth)
        #j.inputdata.XMLCatalogueSlice = self.XMLCatalogueSlice
        logger.debug("Returning new subjob")
        return j
Code example #20
File: DiracFile.py  Project: slangrock/ganga
    def put(self, lfn='', force=False, uploadSE="", replicate=False):
        """
        Try to upload file sequentially to storage elements defined in configDirac['allDiracSE'].
        File will be uploaded to the first SE that the upload command succeeds for.

        The file is uploaded to the SE described by the DiracFile.defaultSE attribute

        Alternatively, the user can specify an uploadSE which contains an SE
        which the file is to be uploaded to.

        If the user wants to replicate this file(s) across all SE then they should state replicate = True.

        Return value will be either the stdout from the dirac upload command if not
        using the wildcard characters '*?[]' in the namePattern.
        If the wildcard characters are used then the return value will be a list containing
        newly created DiracFile objects which were the result of glob-ing the wildcards.

        The objects in this list will have been uploaded or had their failureReason attribute populated if the
        upload failed.
        """

        if self.lfn != "" and force == False and lfn == '':
            logger.warning(
                "Warning you're about to 'put' this DiracFile: %s on the grid as it already has an lfn: %s"
                % (self.namePattern, self.lfn))
            decision = raw_input('y / [n]:')
            while not (decision in ['y', 'n'] or decision == ''):
                decision = raw_input('y / [n]:')

            if decision == 'y':
                pass
            else:
                return

        if (lfn != '' and self.lfn != '') and force == False:
            logger.warning(
                "Warning you're attempting to put this DiracFile: %s" %
                self.namePattern)
            logger.warning("It currently has an LFN associated with it: %s" %
                           self.lfn)
            logger.warning(
                "Do you want to continue and attempt to upload to: %s" % lfn)
            decision = raw_input('y / [n]:')
            while not (decision in ['y', 'n', '']):
                decision = raw_input('y / [n]:')

            if decision == 'y':
                pass
            else:
                return

        if lfn != '':
            self.lfn = lfn

        # It looks like this is only needed for the interactive uploading of jobs.
        # Also, if any backend needs a Dirac upload on the client, then when downloaded
        # this will upload and then delete the file.

        if self.namePattern == "":
            if self.lfn != '':
                logger.warning(
                    "'Put'-ing a file with ONLY an existing LFN makes no sense!"
                )
            raise GangaException(
                'Can\'t upload a file without a local file name.')

        sourceDir = self.localDir
        if self.localDir is None:
            sourceDir = os.getcwd()
            # attached to a job, use the joboutputdir
            if self._parent != None and os.path.isdir(
                    self.getJobObject().outputdir):
                sourceDir = self.getJobObject().outputdir

        if not os.path.isdir(sourceDir):
            raise GangaException(
                'localDir attribute is not a valid dir, don\'t know from which dir to take the file'
            )

        if regex.search(self.namePattern) is not None:
            if self.lfn != "":
                logger.warning(
                    "Cannot specify a single lfn for a wildcard namePattern")
                logger.warning("LFN will be generated automatically")
                self.lfn = ""

        selfConstructedLFN = False

        import glob
        if self.remoteDir == '' and self.lfn == '':
            import datetime
            t = datetime.datetime.now()
            this_date = t.strftime("%H.%M_%A_%d_%B_%Y")
            self.lfn = os.path.join(configDirac['DiracLFNBase'],
                                    'GangaFiles_%s' % this_date)
            selfConstructedLFN = True
        #if self.remoteDir == '' and self.lfn != '':
        #    self.remoteDir = configDirac['DiracLFNBase']

        if self.remoteDir[:4] == 'LFN:':
            lfn_base = self.remoteDir[4:]
        else:
            lfn_base = self.remoteDir

        if uploadSE != "":
            storage_elements = all_SE_list(uploadSE)
        else:
            storage_elements = all_SE_list(self.defaultSE)

        outputFiles = GangaList()
        for this_file in glob.glob(os.path.join(sourceDir, self.namePattern)):
            name = this_file

            if not os.path.exists(name):
                if not self.compressed:
                    raise GangaException(
                        'Cannot upload file. File "%s" must exist!' % name)
                name += '.gz'
                if not os.path.exists(name):
                    raise GangaException('File "%s" must exist!' % name)
            else:
                if self.compressed:
                    os.system('gzip -c %s > %s.gz' % (name, name))
                    name += '.gz'
                    if not os.path.exists(name):
                        raise GangaException('File "%s" must exist!' % name)

            if lfn == "":
                lfn = os.path.join(lfn_base, os.path.basename(name))

            if selfConstructedLFN is True:
                self.lfn = os.path.join(self.lfn, os.path.basename(name))

            lfn = self.lfn

            d = DiracFile()
            d.namePattern = os.path.basename(name)
            d.compressed = self.compressed
            d.localDir = sourceDir
            stderr = ''
            stdout = ''
            logger.info('Uploading file %s to %s as %s' %
                        (name, storage_elements[0], lfn))
            stdout = execute('uploadFile("%s", "%s", %s)' %
                             (lfn, name, str([storage_elements[0]])))
            if type(stdout) == str:
                logger.warning("Couldn't upload file '%s': %s" %
                               (os.path.basename(name), stdout))
                continue
            if stdout.get('OK', False) and lfn in stdout.get(
                    'Value', {'Successful': {}})['Successful']:
                # when doing the two step upload delete the temp file
                if self.compressed or self._parent != None:
                    os.remove(name)
                # need another eval as datetime needs to be included.
                guid = stdout['Value']['Successful'][lfn].get('GUID', '')
                if regex.search(self.namePattern) is not None:
                    d.lfn = lfn
                    d.remoteDir = os.path.dirname(lfn)
                    d.locations = stdout['Value']['Successful'][lfn].get(
                        'allDiracSE', '')
                    d.guid = guid
                    outputFiles.append(GPIProxyObjectFactory(d))
                    continue
                else:
                    self.lfn = lfn
                    self.remoteDir = os.path.dirname(lfn)
                    self.locations = stdout['Value']['Successful'][lfn].get(
                        'allDiracSE', '')
                    self.guid = guid
                # return ## WHY?
            else:
                failureReason = "Error in uploading file %s : %s" % (
                    os.path.basename(name), str(stdout))
                logger.error(failureReason)
                if regex.search(self.namePattern) is not None:
                    d.failureReason = failureReason
                    outputFiles.append(GPIProxyObjectFactory(d))
                    continue
                self.failureReason = failureReason
                return str(stdout)

        if replicate == True:

            if len(outputFiles) == 1 or len(outputFiles) == 0:
                storage_elements.pop(0)
                for se in storage_elements:
                    self.replicate(se)
            else:
                storage_elements.pop(0)
                for this_file in outputFiles:
                    for se in storage_elements:
                        this_file.replicate(se)

        if len(outputFiles) > 0:
            return GPIProxyObjectFactory(outputFiles)
        else:
            outputFiles.append(self)
            return GPIProxyObjectFactory(outputFiles)