def insertClassIntoGPI(class_):
    """Insert a given class object into the GPI.

    Args:
        class_ (class): This is the class object to insert into the GPI
    """
    name = _getName(class_)
    exportToGPI(name, class_, "Objects")
    allPlugins.add(class_, class_._category, name)
    # Reset the plugin lookup cache so the newly added class is discoverable
    allPlugins._prev_found = {}
def removeClassFromGPI(class_):
    """Remove a given class object from the GPI.

    Args:
        class_ (class): This is the class object to remove from the GPI
    """
    import Ganga.GPI
    name = _getName(class_)
    del Ganga.GPI.__dict__[name]
    del allPlugins.all_dict[class_._category][name]
    # Reset the plugin lookup cache so stale entries are not returned
    allPlugins._prev_found = {}
def _checkConfig(self):
    """Check that the MassStorageFile configuration is correct.

    Raises:
        GangaException: when no upload 'path' is configured for this file type
    """
    upload_path = getConfig('Output')[_getName(self)]['uploadOptions']['path']
    if not upload_path:
        raise GangaException(
            'Unable to create MassStorageFile. Check your configuration!')
def test_b_testClientSideComplete(self):
    """Test the client side code whilst stil using the Local backend"""
    from Ganga.GPI import jobs

    assert getConfig('Output')[_getName(self.fileClass)]['backendPostprocess']['Local'] == 'client'

    j = jobs[-1]
    assert sleep_until_completed(j)

    for sj in j.subjobs:
        worker_dir = stripProxy(sj).getOutputWorkspace(create=False).getPath()
        assert os.path.isdir(worker_dir)

        # The uploaded files must have been removed from the worker output dir...
        for input_f in j.inputfiles:
            assert not os.path.isfile(os.path.join(worker_dir, input_f.namePattern))

        # ...and must have landed on storage under <jid>/<sjid>
        storage_dir = os.path.join(self.outputFilePath, str(j.id), str(sj.id))
        for file_ in j.inputfiles:
            assert os.path.isfile(os.path.join(storage_dir, file_.namePattern))

        # Wildcard expansion should have produced exactly 2 files per subjob
        assert len(stripProxy(stripProxy(sj).outputfiles[0]).subfiles) == 2
        assert len(sj.outputfiles) == 2
def _mkdir(self, massStoragePath, exitIfNotExist=False):
    """Create a folder on the mass storage corresponding to the given path.

    Args:
        massStoragePath (str): This is the path we want to make if it doesn't exist.
        exitIfNotExist (bool): when True, a failure to list the parent dir is fatal
    """
    config = getConfig('Output')[_getName(self)]['uploadOptions']
    mkdir_cmd = config['mkdir_cmd']
    ls_cmd = config['ls_cmd']

    # Split the target into parent directory and final path component
    parent_dir = os.path.dirname(massStoragePath)
    target_name = os.path.basename(massStoragePath)

    (exitcode, mystdout, mystderr) = self.execSyscmdSubprocess('%s %s' % (ls_cmd, quote(parent_dir)))

    if exitcode != 0 and exitIfNotExist:
        self.handleUploadFailure(mystderr, '1) %s %s' % (ls_cmd, parent_dir))
        raise GangaException(mystderr)

    # Only create the directory when the parent listing does not contain it
    already_exists = any(entry.strip() == target_name for entry in mystdout.split('\n'))

    if not already_exists:
        (exitcode, mystdout, mystderr) = self.execSyscmdSubprocess('%s -p %s' % (mkdir_cmd, quote(massStoragePath)))
        if exitcode != 0:
            self.handleUploadFailure(mystderr, '2) %s %s' % (mkdir_cmd, massStoragePath))
            raise GangaException(mystderr)
def setUp(self):
    """Configure the MassStorageFile for the test."""
    extra_opts = [
        ('PollThread', 'autostart', 'False'),
        ('Local', 'remove_workdir', 'False'),
        ('TestingFramework', 'AutoCleanup', 'False'),
        ('Output', _getName(self.fileClass), self.MassStorageTestConfig),
        ('Output', 'FailJobIfNoOutputMatched', 'True'),
    ]
    super(TestMassStorageClientInput, self).setUp(extra_opts=extra_opts)
def setUp(self):
    """Configure the MassStorageFile for the test."""
    extra_opts = [
        ('PollThread', 'autostart', 'False'),
        ('Local', 'remove_workdir', 'False'),
        ('TestingFramework', 'AutoCleanup', 'False'),
        ('Output', _getName(self.fileClass), TestMassStorageWN.MassStorageTestConfig),
        ('Output', 'FailJobIfNoOutputMatched', 'True'),
    ]
    super(TestMassStorageWN, self).setUp(extra_opts=extra_opts)
def getWNInjectedScript(self, outputFiles, indent, patternsToZip, postProcessLocationsFP):
    """Return the script to be injected in the jobscript for postprocessing on the WN.

    Args:
        outputFiles (list): output file objects to be uploaded from the worker node
        indent (str): indentation prefix applied to every injected line
        patternsToZip (list): name patterns which should be zipped before upload
        postProcessLocationsFP (str): path of the file recording upload locations
    """
    massStorageCommands = []

    massStorageConfig = getConfig('Output')[_getName(self)]['uploadOptions']

    for outputFile in outputFiles:
        outputfilenameformat = 'None'
        # 'is not None' instead of '!= None'; empty string also means "unset"
        if outputFile.outputfilenameformat is not None and outputFile.outputfilenameformat != '':
            outputfilenameformat = outputFile.outputfilenameformat
        massStorageCommands.append([
            'massstorage', outputFile.namePattern, outputfilenameformat,
            massStorageConfig['mkdir_cmd'], massStorageConfig['cp_cmd'],
            massStorageConfig['ls_cmd'], massStorageConfig['path']
        ])

    # Template script shipped alongside this module
    script_location = os.path.join(
        os.path.dirname(
            os.path.abspath(inspect.getfile(inspect.currentframe()))),
        'scripts/MassStorageFileWNScript.py.template')

    from Ganga.GPIDev.Lib.File import FileUtils
    script = FileUtils.loadScript(script_location, '###INDENT###')

    jobfqid = self.getJobObject().fqid

    jobid = jobfqid
    subjobid = ''

    # A fqid of the form '<jid>.<sjid>' means this file belongs to a subjob
    if (jobfqid.find('.') > -1):
        jobid = jobfqid.split('.')[0]
        subjobid = jobfqid.split('.')[1]

    replace_dict = {
        '###MASSSTORAGECOMMANDS###': repr(massStorageCommands),
        '###PATTERNSTOZIP###': str(patternsToZip),
        '###INDENT###': indent,
        '###POSTPROCESSLOCATIONSFP###': postProcessLocationsFP,
        '###FULLJOBDIR###': str(jobfqid.replace('.', os.path.sep)),
        '###JOBDIR###': str(jobid),
        '###SUBJOBDIR###': str(subjobid)
    }

    # BUGFIX: items() instead of Python-2-only iteritems() (works on both 2 and 3)
    for k, v in replace_dict.items():
        script = script.replace(str(k), str(v))

    return script
def __init__(self, namePattern='', localDir='', **kwds):
    """SharedFile construction.

    Args:
        namePattern (str): pattern of the output file that has to be written into mass storage
        localDir (str): optional local directory of a file to be uploaded to mass storage
    """
    configured_path = getConfig('Output')[_getName(self)]['uploadOptions']['path']
    if configured_path is None:
        # A configured path is a hard requirement for SharedFile to operate
        logger.error("In order to use the SharedFile class you will need to define the path directory in your .gangarc")
        raise GangaException("In order to use the SharedFile class you will need to define the path directory in your .gangarc")
    super(SharedFile, self).__init__(namePattern, localDir, **kwds)
def internalCopyTo(self, targetPath):
    """Copy the file to local storage using the configured copy command.

    Args:
        targetPath (str): target path where the file is to be copied to
    """
    cp_cmd = getConfig('Output')[_getName(self)]['uploadOptions']['cp_cmd']
    for location in self.locations:
        destination = os.path.join(targetPath, os.path.basename(location))
        self.execSyscmdSubprocess('%s %s %s' % (cp_cmd, quote(location), quote(destination)))
def processWildcardMatches(self):
    """Expand a wildcard namePattern into concrete subfiles by listing the remote dir.

    Returns the already-expanded subfiles immediately if expansion was done before.
    """
    if self.subfiles:
        return self.subfiles

    if regex.search(self.namePattern):
        ls_cmd = getConfig('Output')[_getName(self)]['uploadOptions']['ls_cmd']
        exitcode, output, m = self.shell.cmd1(ls_cmd + ' ' + self.inputremotedirectory, capture_stderr=True)

        for filename in output.split('\n'):
            if fnmatch(filename, self.namePattern):
                subfile = copy.deepcopy(self)
                # BUGFIX: was 'subfile.namepattern' (lowercase 'p'), which set an
                # unrelated attribute and left the subfile's namePattern as the
                # original wildcard pattern
                subfile.namePattern = filename
                subfile.inputremotedirectory = self.inputremotedirectory
                self.subfiles.append(subfile)
def internalCopyTo(self, targetPath):
    """Copy the file to local storage using the get mechanism.

    Args:
        targetPath (str): target path where the file is to be copied to
    """
    to_location = targetPath

    # The copy command is taken from the [Output] config for this file type
    cp_cmd = getConfig('Output')[_getName(self)]['uploadOptions']['cp_cmd']

    # Each known remote location is copied into the target directory,
    # keeping its basename
    for location in self.locations:
        targetLocation = os.path.join(to_location, os.path.basename(location))
        self.execSyscmdSubprocess(
            '%s %s %s' % (cp_cmd, quote(location), quote(targetLocation)))
def accessURL(self):
    """Return full (protocol-prefixed) URLs for each known location of this file."""
    # Need to come up with a prescription based upon the server address and
    # file on EOS or elsewhere to return a full URL which we can pass to
    # ROOT...
    protoPath = getConfig('Output')[_getName(self)]['defaultProtocol']
    return [protoPath + os.path.join(os.sep, _file) for _file in self.location()]
def getWNScriptDownloadCommand(self, indent):
    """Return the script snippet that downloads this file on the worker node.

    Args:
        indent (str): indentation prefix applied to each injected line
    """
    ## FIXME fix me for the situation of multiple files?

    script = """\n
###INDENT###os.system(\'###CP_COMMAND###\')
"""

    cp_cmd = '%s %s .' % (getConfig('Output')[_getName(self)]['uploadOptions']['cp_cmd'], quote(self.locations[0]))

    replace_dict = {'###INDENT###': indent, '###CP_COMMAND###': cp_cmd}

    # BUGFIX: items() instead of Python-2-only iteritems() (works on both 2 and 3)
    for k, v in replace_dict.items():
        script = script.replace(str(k), str(v))

    return script
def accessURL(self):
    """Return full (protocol-prefixed) URLs for each known location of this file."""
    # Need to come up with a prescription based upon the server address and
    # file on EOS or elsewhere to return a full URL which we can pass to
    # ROOT...
    protoPath = getConfig('Output')[_getName(self)]['defaultProtocol']

    accessURLs = []
    # 'location' renamed from 'file' to avoid shadowing the builtin
    for location in self.location():
        accessURLs.append(protoPath + os.path.join(os.sep, location))

    return accessURLs
def __init__(self, namePattern='', localDir='', **kwds):
    """SharedFile construction.

    Args:
        namePattern (str): is the pattern of the output file that has to be written into mass storage
        localDir (str): This is the optional local directory of a file to be uploaded to mass storage

    Raises:
        GangaException: when no upload 'path' has been configured in .gangarc
    """
    # A configured upload path is a hard requirement for SharedFile to operate
    if getConfig('Output')[_getName(
            self)]['uploadOptions']['path'] is None:
        logger.error(
            "In order to use the SharedFile class you will need to define the path directory in your .gangarc"
        )
        raise GangaException(
            "In order to use the SharedFile class you will need to define the path directory in your .gangarc"
        )
    super(SharedFile, self).__init__(namePattern, localDir, **kwds)
def getWNScriptDownloadCommand(self, indent):
    """Return the script snippet that downloads this file on the worker node.

    Args:
        indent (str): indentation prefix applied to each injected line
    """
    ## FIXME fix me for the situation of multiple files?

    script = """\n
###INDENT###os.system(\'###CP_COMMAND###\')
"""

    cp_cmd = '%s %s .' % (getConfig('Output')[_getName(
        self)]['uploadOptions']['cp_cmd'], quote(self.locations[0]))

    replace_dict = {'###INDENT###': indent, '###CP_COMMAND###': cp_cmd}

    # BUGFIX: items() instead of Python-2-only iteritems() (works on both 2 and 3)
    for k, v in replace_dict.items():
        script = script.replace(str(k), str(v))

    return script
def getWNInjectedScript(self, outputFiles, indent, patternsToZip, postProcessLocationsFP):
    """Return the script to be injected in the jobscript for postprocessing on the WN.

    Args:
        outputFiles (list): output file objects to be uploaded from the worker node
        indent (str): indentation prefix applied to every injected line
        patternsToZip (list): name patterns which should be zipped before upload
        postProcessLocationsFP (str): path of the file recording upload locations
    """
    massStorageCommands = []

    massStorageConfig = getConfig('Output')[_getName(self)]['uploadOptions']

    for outputFile in outputFiles:
        outputfilenameformat = 'None'
        # 'is not None' instead of '!= None'; empty string also means "unset"
        if outputFile.outputfilenameformat is not None and outputFile.outputfilenameformat != '':
            outputfilenameformat = outputFile.outputfilenameformat
        massStorageCommands.append(['massstorage', outputFile.namePattern, outputfilenameformat,
                                    massStorageConfig['mkdir_cmd'], massStorageConfig['cp_cmd'],
                                    massStorageConfig['ls_cmd'], massStorageConfig['path']])

    # Template script shipped alongside this module
    script_location = os.path.join(os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))),
                                   'scripts/MassStorageFileWNScript.py.template')

    from Ganga.GPIDev.Lib.File import FileUtils
    script = FileUtils.loadScript(script_location, '###INDENT###')

    jobfqid = self.getJobObject().fqid

    jobid = jobfqid
    subjobid = ''

    # A fqid of the form '<jid>.<sjid>' means this file belongs to a subjob
    if (jobfqid.find('.') > -1):
        jobid = jobfqid.split('.')[0]
        subjobid = jobfqid.split('.')[1]

    replace_dict = {'###MASSSTORAGECOMMANDS###': repr(massStorageCommands),
                    '###PATTERNSTOZIP###': str(patternsToZip),
                    '###INDENT###': indent,
                    '###POSTPROCESSLOCATIONSFP###': postProcessLocationsFP,
                    '###FULLJOBDIR###': str(jobfqid.replace('.', os.path.sep)),
                    '###JOBDIR###': str(jobid),
                    '###SUBJOBDIR###': str(subjobid)}

    # BUGFIX: items() instead of Python-2-only iteritems() (works on both 2 and 3)
    for k, v in replace_dict.items():
        script = script.replace(str(k), str(v))

    return script
def processWildcardMatches(self):
    """Expand a wildcard namePattern into concrete subfiles by listing the remote dir.

    Returns the already-expanded subfiles immediately if expansion was done before.
    """
    if self.subfiles:
        return self.subfiles

    if regex.search(self.namePattern):
        ls_cmd = getConfig('Output')[_getName(
            self)]['uploadOptions']['ls_cmd']
        exitcode, output, m = self.shell.cmd1(ls_cmd + ' ' + self.inputremotedirectory,
                                              capture_stderr=True)

        for filename in output.split('\n'):
            if fnmatch(filename, self.namePattern):
                subfile = copy.deepcopy(self)
                # BUGFIX: was 'subfile.namepattern' (lowercase 'p'), which set an
                # unrelated attribute and left the subfile's namePattern as the
                # original wildcard pattern
                subfile.namePattern = filename
                subfile.inputremotedirectory = self.inputremotedirectory
                self.subfiles.append(subfile)
def test_a_testClientSideSubmit(self):
    """Test the client side code whilst stil using the Local backend"""
    MassStorageFile = self.fileClass
    from Ganga.GPI import LocalFile, Job, ArgSplitter

    TestMassStorageClient.cleanUp()

    assert getConfig('Output')[_getName(self.fileClass)]['backendPostprocess']['Local'] == 'client'

    # Two unique temp files that the job will ship as input
    file_1 = generate_unique_temp_file(TestMassStorageClient._ext)
    file_2 = generate_unique_temp_file(TestMassStorageClient._ext)
    TestMassStorageClient._managed_files.extend([file_1, file_2])

    j = Job()
    j.inputfiles = [LocalFile(file_1), LocalFile(file_2)]
    j.splitter = ArgSplitter(args=[[_] for _ in range(TestMassStorageClient.sj_len)])
    j.outputfiles = [MassStorageFile(namePattern='*' + TestMassStorageClient._ext)]
    j.submit()
def _mkdir(self, massStoragePath, exitIfNotExist=False):
    """Creates a folder on the mass Storage corresponding to the given path.

    Args:
        massStoragePath (str): This is the path we want to make if it doesn't exist.
        exitIfNotExist (bool): when True, a failure to list the parent dir is fatal
    """
    massStorageConfig = getConfig('Output')[_getName(
        self)]['uploadOptions']
    mkdir_cmd = massStorageConfig['mkdir_cmd']
    ls_cmd = massStorageConfig['ls_cmd']

    # create the last directory (if not exist) from the config path
    pathToDirName = os.path.dirname(massStoragePath)
    dirName = os.path.basename(massStoragePath)

    directoryExists = False

    # List the parent directory to see whether the target already exists
    (exitcode, mystdout, mystderr) = self.execSyscmdSubprocess(
        '%s %s' % (ls_cmd, quote(pathToDirName)))

    if exitcode != 0 and exitIfNotExist:
        self.handleUploadFailure(mystderr,
                                 '1) %s %s' % (ls_cmd, pathToDirName))
        raise GangaException(mystderr)

    # Scan the listing for an entry matching the target directory name
    for directory in mystdout.split('\n'):
        if directory.strip() == dirName:
            directoryExists = True
            break

    if not directoryExists:
        (exitcode, mystdout, mystderr) = self.execSyscmdSubprocess(
            '%s -p %s' % (mkdir_cmd, quote(massStoragePath)))
        if exitcode != 0:
            self.handleUploadFailure(
                mystderr, '2) %s %s' % (mkdir_cmd, massStoragePath))
            raise GangaException(mystderr)
class TestMassStorageGetPut(GangaUnitTest):
    """Testing the get/put/copyTo methods of MassStorage"""

    # Temp files created on local disk (removed in tearDownClass)
    _temp_files = []
    # File objects which have been uploaded to the test 'mass storage' area
    _managed_files = []

    # Num of sj in tests
    sj_len = 3

    fileClass = addProxy(MassStorageFile)

    # Where on local storage we want to have our 'MassStorage solution'
    outputFilePath = '/tmp/Test' + _getName(fileClass) + 'GetPut'

    # This sets up a MassStorageConfiguration which works by placing a file on local storage somewhere we can test using standard tools
    MassStorageTestConfig = {
        'defaultProtocol': 'file://',
        'fileExtensions': [''],
        'uploadOptions': {
            'path': outputFilePath,
            'cp_cmd': 'cp',
            'ls_cmd': 'ls',
            'mkdir_cmd': 'mkdir -p'
        },
        'backendPostprocess': {
            'LSF': 'client',
            'LCG': 'client',
            'ARC': 'client',
            'Dirac': 'client',
            'PBS': 'client',
            'Interactive': 'client',
            'Local': 'client',
            'CREAM': 'client'
        }
    }

    def setUp(self):
        """Configure the MassStorageFile for the test."""
        extra_opts = [('PollThread', 'autostart', 'False'),
                      ('Local', 'remove_workdir', 'False'),
                      ('TestingFramework', 'AutoCleanup', 'False'),
                      ('Output', _getName(self.fileClass),
                       self.MassStorageTestConfig),
                      ('Output', 'FailJobIfNoOutputMatched', 'True')]
        super(TestMassStorageGetPut, self).setUp(extra_opts=extra_opts)

    @staticmethod
    def cleanUp():
        """Cleanup the current temp jobs."""
        from Ganga.GPI import jobs
        for j in jobs:
            shutil.rmtree(j.backend.workdir, ignore_errors=True)
            j.remove()

    @classmethod
    def setUpClass(cls):
        """This creates a safe place to put the files into 'mass-storage'."""
        cls.outputFilePath = tempfile.mkdtemp()
        cls.MassStorageTestConfig['uploadOptions']['path'] = cls.outputFilePath

    @classmethod
    def tearDownClass(cls):
        """Cleanup the current temp objects."""
        for file_ in cls._temp_files:
            os.unlink(file_)
        cls._temp_files = []
        for file_ in cls._managed_files:
            os.unlink(os.path.join(cls.outputFilePath, file_.namePattern))
        cls._managed_files = []
        shutil.rmtree(cls.outputFilePath, ignore_errors=True)

    def test_a_test_put(self):
        """Test that a job can be submitted with inputfiles in the input"""
        MassStorageFile = self.fileClass
        _ext = '.root'

        file_1 = generate_unique_temp_file(_ext)
        file_2 = generate_unique_temp_file(_ext)
        self._temp_files.append(file_1)
        self._temp_files.append(file_2)

        msf_1 = MassStorageFile(file_1)
        msf_2 = MassStorageFile(file_2)
        self._managed_files.append(msf_1)
        self._managed_files.append(msf_2)

        msf_1.put()
        msf_2.put()

        # After put() the files must exist on 'mass storage' and the
        # localDir attribute must be clearable
        for file_ in [msf for msf in (msf_1, msf_2)]:
            assert os.path.isfile(
                os.path.join(self.outputFilePath, file_.namePattern))
            file_.localDir = ''
            assert file_.localDir == ''

    def test_b_test_get(self):
        """Test that the files were made accessible to the WN area and collected as LocalFile objects in outputfiles"""
        from Ganga.GPI import Job

        tmpdir = tempfile.mkdtemp()

        # Test in the case that the files don't have a parent or a localDir:
        # get() is expected to raise a GangaException
        for file_ in self._managed_files:
            file_.localDir = ''
            try:
                assert file_.localDir == ''
                file_.get()
                print("Unexpected localDir: %s" % file_.localDir)
                failed = False
            except GangaException:
                failed = True
            assert failed

        # Test in the case that the localDir has been set
        for file_ in self._managed_files:
            file_.localDir = tmpdir
            print("localDir: %s" % file_.localDir)
            file_.get()
            assert os.path.isfile(os.path.join(tmpdir, file_.namePattern))
            file_.localDir = ''
            assert file_.localDir == ''

        # Test in the case that the object is 'owned' by a Job:
        # get() should download into the job's output workspace
        j = Job()
        outputdir = stripProxy(j).getOutputWorkspace(create=True).getPath()
        j.outputfiles = self._managed_files
        for file_ in j.outputfiles:
            assert stripProxy(file_).getJobObject() is stripProxy(j)
            assert file_.localDir == ''
            file_.get()
            assert os.path.isfile(os.path.join(outputdir, file_.namePattern))

        shutil.rmtree(tmpdir, ignore_errors=True)
        self.cleanUp()

    def test_c_test_copyTo(self):
        """Test the new copyTo interface"""
        tmpdir = tempfile.mkdtemp()
        for file_ in self._managed_files:
            file_.localDir = ''
            stripProxy(file_).copyTo(tmpdir)
            assert os.path.isfile(os.path.join(tmpdir, file_.namePattern))
        shutil.rmtree(tmpdir, ignore_errors=True)
def put(self):
    """Creates and executes commands for file upload to mass storage (Castor),
    this method will be called on the client
    """
    sourceDir = ''

    # if used as a stand alone object
    if self._getParent() is None:
        if self.localDir == '':
            # Fall back to the current working directory when it holds the file
            _CWD = os.getcwd()
            if os.path.isfile(os.path.join(_CWD, self.namePattern)):
                sourceDir = _CWD
            else:
                logger.warning('localDir attribute is empty, don\'t know from which dir to take the file')
                return
        else:
            sourceDir = self.localDir

            (result, message) = self.validate()

            if result == False:
                logger.warning(message)
                return
    else:
        job = self.getJobObject()
        sourceDir = job.outputdir

        # if there are subjobs, the put method will be called on every subjob
        # and will upload the resulted output file
        if len(job.subjobs) > 0:
            return

    massStorageConfig = getConfig('Output')[_getName(self)]['uploadOptions']

    cp_cmd = massStorageConfig['cp_cmd']
    ls_cmd = massStorageConfig['ls_cmd']
    massStoragePath = massStorageConfig['path']

    # Ensure the configured base path exists before uploading anything
    try:
        self._mkdir(massStoragePath, exitIfNotExist=True)
    except GangaException:
        return

    # the folder part of self.outputfilenameformat
    folderStructure = ''
    # the file name part of self.outputfilenameformat
    filenameStructure = ''

    if self._getParent() is not None:
        jobfqid = self.getJobObject().fqid

        jobid = jobfqid
        subjobid = ''

        # A fqid of the form '<jid>.<sjid>' means this file belongs to a subjob
        if (jobfqid.find('.') > -1):
            jobid = jobfqid.split('.')[0]
            subjobid = jobfqid.split('.')[1]

        if self.outputfilenameformat is None:
            filenameStructure = '{fname}'

            # create jid/sjid directories
            folderStructure = jobid
            if subjobid != '':
                folderStructure = os.path.join(jobid, subjobid)
        else:
            # Substitute {jid}/{sjid} in the user-supplied format string
            filenameStructure = os.path.basename(self.outputfilenameformat)
            filenameStructure = filenameStructure.replace('{jid}', jobid)

            folderStructure = os.path.dirname(self.outputfilenameformat)
            folderStructure = folderStructure.replace('{jid}', jobid)

            if subjobid != '':
                filenameStructure = filenameStructure.replace('{sjid}', subjobid)
                folderStructure = folderStructure.replace('{sjid}', subjobid)
    else:
        if self.outputfilenameformat != None:
            folderStructure = os.path.dirname(self.outputfilenameformat)
            filenameStructure = os.path.basename(self.outputfilenameformat)
        else:
            filenameStructure = '{fname}'

    # create the folder structure
    if folderStructure:
        massStoragePath = os.path.join(massStoragePath, folderStructure)
        try:
            self._mkdir(massStoragePath)
        except GangaException:
            return

    # here filenameStructure has replaced jid and sjid if any, and only not
    # replaced keyword is fname
    fileName = self.namePattern
    if self.compressed:
        fileName = '%s.gz' % self.namePattern

    if regex.search(fileName) is not None:
        # Wildcard pattern: upload every match and record it as a subfile
        for currentFile in glob.glob(os.path.join(sourceDir, fileName)):
            finalFilename = filenameStructure.replace('{fname}', os.path.basename(currentFile))

            (exitcode, mystdout, mystderr) = self.execSyscmdSubprocess('%s %s %s' %\
                (cp_cmd, quote(currentFile), quote(os.path.join(massStoragePath, finalFilename))))

            d = copy.deepcopy(self)
            d.namePattern = os.path.basename(currentFile)
            d.localDir = os.path.dirname(currentFile)
            d.compressed = self.compressed

            if exitcode != 0:
                self.handleUploadFailure(mystderr, '4) %s %s %s' % (cp_cmd, currentFile,
                                         os.path.join(massStoragePath, finalFilename)))
            else:
                logger.info('%s successfully uploaded to mass storage as %s' %
                            (currentFile, os.path.join(massStoragePath, finalFilename)))
                d.locations = os.path.join(massStoragePath, os.path.basename(finalFilename))

            self.subfiles.append(d)
    else:
        # Single-file case: upload and record the location on this object
        currentFile = os.path.join(sourceDir, fileName)
        finalFilename = filenameStructure.replace('{fname}', os.path.basename(currentFile))

        (exitcode, mystdout, mystderr) = self.execSyscmdSubprocess('%s %s %s' %\
            (cp_cmd, quote(currentFile), quote(os.path.join(massStoragePath, finalFilename))))

        if exitcode != 0:
            self.handleUploadFailure(mystderr, '5) %s %s %s' % (cp_cmd, currentFile,
                                     os.path.join(massStoragePath, finalFilename)))
        else:
            logger.info('%s successfully uploaded to mass storage as %s' %
                        (currentFile, os.path.join(massStoragePath, finalFilename)))
            location = os.path.join(massStoragePath,
                                    os.path.basename(finalFilename))
            if location not in self.locations:
                self.locations.append(location)
def validate(self):
    """Validate outputfilenameformat for required keywords and invalid characters.

    Returns:
        tuple: (bool, str) — success flag plus an error message when invalid
    """
    # if the user has set outputfilenameformat, validate for presence of
    # jid, sjid and fname keywords depending on job type - split or non-split
    if self.outputfilenameformat != None:
        searchFor = ['{fname}']
        isJob = False
        isSplitJob = False

        if self._getParent() != None:
            isJob = True
            if stripProxy(self.getJobObject()).master is not None:
                isSplitJob = True
                searchFor.append('{sjid}')

        missingKeywords = [item for item in searchFor
                           if self.outputfilenameformat.find(item) == -1]

        if missingKeywords:
            return (
                False,
                'Error in %s.outputfilenameformat field : missing keywords %s ' %
                (_getName(self), ','.join(missingKeywords)))

        if isSplitJob == False and self.outputfilenameformat.find('{sjid}') > -1:
            return (
                False,
                'Error in %s.outputfilenameformat field : job is non-split, but {\'sjid\'} keyword found' %
                _getName(self))

        if isJob == False and self.outputfilenameformat.find('{sjid}') > -1:
            return (
                False,
                'Error in %s.outputfilenameformat field : no parent job, but {\'sjid\'} keyword found' %
                _getName(self))

        if isJob == False and self.outputfilenameformat.find('{jid}') > -1:
            return (
                False,
                'Error in %s.outputfilenameformat field : no parent job, but {\'jid\'} keyword found' %
                _getName(self))

        # Reject characters that would be awkward in unix paths
        invalidUnixChars = ['"', ' ']
        test = self.outputfilenameformat.replace('{jid}', 'a').replace(
            '{sjid}', 'b').replace('{fname}', 'c')
        for invalidUnixChar in invalidUnixChars:
            if test.find(invalidUnixChar) > -1:
                return (
                    False,
                    'Error in %s.outputfilenameformat field : invalid char %s found' %
                    (_getName(self), invalidUnixChar))

    return (True, '')
def put(self):
    """Creates and executes commands for file upload to mass storage (Castor),
    this method will be called on the client
    """
    sourceDir = ''

    # if used as a stand alone object
    if self._getParent() is None:
        if self.localDir == '':
            # Fall back to the current working directory when it holds the file
            _CWD = os.getcwd()
            if os.path.isfile(os.path.join(_CWD, self.namePattern)):
                sourceDir = _CWD
            else:
                logger.warning(
                    'localDir attribute is empty, don\'t know from which dir to take the file'
                )
                return
        else:
            sourceDir = self.localDir

            (result, message) = self.validate()

            if result == False:
                logger.warning(message)
                return
    else:
        job = self.getJobObject()
        sourceDir = job.outputdir

        # if there are subjobs, the put method will be called on every subjob
        # and will upload the resulted output file
        if len(job.subjobs) > 0:
            return

    massStorageConfig = getConfig('Output')[_getName(
        self)]['uploadOptions']
    cp_cmd = massStorageConfig['cp_cmd']
    ls_cmd = massStorageConfig['ls_cmd']
    massStoragePath = massStorageConfig['path']

    # Ensure the configured base path exists before uploading anything
    try:
        self._mkdir(massStoragePath, exitIfNotExist=True)
    except GangaException:
        return

    # the folder part of self.outputfilenameformat
    folderStructure = ''
    # the file name part of self.outputfilenameformat
    filenameStructure = ''

    if not self.outputfilenameformat:
        filenameStructure = '{fname}'

        # Default layout: <jid>[/<sjid>] directories when owned by a job
        parent = self._getParent()
        if parent is not None:
            folderStructure = '{jid}'
            if parent._getParent() is not None:
                folderStructure = os.path.join(folderStructure, '{sjid}')
    else:
        folderStructure = os.path.dirname(self.outputfilenameformat)
        filenameStructure = os.path.basename(self.outputfilenameformat)

    # Substitute {jid}/{sjid} keywords in the folder part
    folderStructure = self.expandString(folderStructure)

    # create the folder structure
    if folderStructure:
        massStoragePath = os.path.join(massStoragePath, folderStructure)
        try:
            self._mkdir(massStoragePath)
        except GangaException:
            return

    # here filenameStructure has replaced jid and sjid if any, and only not
    # replaced keyword is fname
    fileName = self.namePattern
    if self.compressed:
        fileName = '%s.gz' % self.namePattern

    if regex.search(fileName) is not None:
        # Wildcard pattern: upload every match and record it as a subfile
        for currentFile in glob.glob(os.path.join(sourceDir, fileName)):
            finalFilename = self.expandString(
                filenameStructure, os.path.basename(currentFile))

            (exitcode, mystdout, mystderr) = self.execSyscmdSubprocess('%s %s %s' %\
                (cp_cmd, quote(currentFile), quote(os.path.join(massStoragePath, finalFilename))))

            d = copy.deepcopy(self)
            d.namePattern = os.path.basename(currentFile)
            d.localDir = os.path.dirname(currentFile)
            d.compressed = self.compressed

            if exitcode != 0:
                self.handleUploadFailure(
                    mystderr, '4) %s %s %s' %
                    (cp_cmd, currentFile,
                     os.path.join(massStoragePath, finalFilename)))
            else:
                logger.info(
                    '%s successfully uploaded to mass storage as %s' %
                    (currentFile, os.path.join(massStoragePath, finalFilename)))
                d.locations = os.path.join(massStoragePath,
                                           os.path.basename(finalFilename))

            self.subfiles.append(d)
    else:
        # Single-file case: upload and record the location on this object
        currentFile = os.path.join(sourceDir, fileName)
        finalFilename = self.expandString(filenameStructure, fileName)

        (exitcode, mystdout, mystderr) = self.execSyscmdSubprocess('%s %s %s' %\
            (cp_cmd, quote(currentFile), quote(os.path.join(massStoragePath, finalFilename))))

        if exitcode != 0:
            self.handleUploadFailure(
                mystderr, '5) %s %s %s' %
                (cp_cmd, currentFile,
                 os.path.join(massStoragePath, finalFilename)))
        else:
            logger.info('%s successfully uploaded to mass storage as %s' %
                        (currentFile,
                         os.path.join(massStoragePath, finalFilename)))
            location = os.path.join(massStoragePath,
                                    os.path.basename(finalFilename))
            if location not in self.locations:
                self.locations.append(location)
def validate(self):
    """Validate self.outputfilenameformat against the job context.

    Returns:
        tuple: (bool, str) — success flag plus an error message when invalid
    """
    # if the user has set outputfilenameformat, validate for presence of
    # jid, sjid and fname keywords depending on job type - split or
    # non-split
    if self.outputfilenameformat != None:
        searchFor = ['{fname}']
        isJob = False
        isSplitJob = False

        if self._getParent() != None:
            isJob = True

            if stripProxy(self.getJobObject()).master is not None:
                isSplitJob = True
                searchFor.append('{sjid}')

        missingKeywords = []

        for item in searchFor:
            if self.outputfilenameformat.find(item) == -1:
                missingKeywords.append(item)

        if len(missingKeywords):
            return (False,
                    'Error in %s.outputfilenameformat field : missing keywords %s ' %
                    (_getName(self), ','.join(missingKeywords)))

        if isSplitJob == False and self.outputfilenameformat.find('{sjid}') > -1:
            return (False,
                    'Error in %s.outputfilenameformat field : job is non-split, but {\'sjid\'} keyword found' %
                    _getName(self))

        if isJob == False and self.outputfilenameformat.find('{sjid}') > -1:
            return (False,
                    'Error in %s.outputfilenameformat field : no parent job, but {\'sjid\'} keyword found' %
                    _getName(self))

        if isJob == False and self.outputfilenameformat.find('{jid}') > -1:
            return (False,
                    'Error in %s.outputfilenameformat field : no parent job, but {\'jid\'} keyword found' %
                    _getName(self))

        # Reject characters that would be awkward in unix paths
        invalidUnixChars = ['"', ' ']
        test = self.outputfilenameformat.replace('{jid}', 'a').replace('{sjid}', 'b').replace('{fname}', 'c')

        for invalidUnixChar in invalidUnixChars:
            if test.find(invalidUnixChar) > -1:
                return (False,
                        'Error in %s.outputfilenameformat field : invalid char %s found' %
                        (_getName(self), invalidUnixChar))

    return (True, '')
def remove(self, force=False, removeLocal=False):
    """Removes file from remote storage ONLY by default.

    Args:
        force (bool): delete without interactive confirmation
        removeLocal (bool): also remove the local copy of the file
    """
    massStorageConfig = getConfig('Output')[_getName(self)]['uploadOptions']
    rm_cmd = massStorageConfig['rm_cmd']

    if force == True:
        _auto_delete = True
    else:
        _auto_delete = False

    # BUGFIX: iterate over a copy — entries are removed from self.locations below
    for i in list(self.locations):

        if not _auto_delete:

            keyin = None

            while keyin is None:
                keyin = raw_input("Do you want to delete file %s at Location: %s ? [y/n] " % (str(self.namePattern), str(i)))
                if keyin.lower() == 'y':
                    _delete_this = True
                elif keyin.lower() == 'n':
                    _delete_this = False
                else:
                    logger.warning("y/n please!")
                    keyin = None
        else:
            _delete_this = True

        if _delete_this:
            # BUGFIX: the %s placeholder previously had no argument
            logger.info("Deleting File at Location: %s" % str(i))
            self.execSyscmdSubprocess('%s %s' % (rm_cmd, quote(i)))
            # BUGFIX: was self.locations.pop(i) — pop() takes an int index,
            # passing the location string raised TypeError
            self.locations.remove(i)

    if removeLocal:

        sourceDir = ''
        if self.localDir == '':
            _CWD = os.getcwd()
            if os.path.isfile(os.path.join(_CWD, self.namePattern)):
                sourceDir = _CWD
        else:
            sourceDir = self.localDir

        _localFile = os.path.join(sourceDir, self.namePattern)

        if os.path.isfile(_localFile):

            if force:
                _actual_delete = True
            else:

                keyin = None

                while keyin is None:
                    keyin = raw_input("Do you want to remove the local File: %s ? ([y]/n) " % str(_localFile))
                    if keyin.lower() in ['y', '']:
                        _actual_delete = True
                    elif keyin.lower() == 'n':
                        _actual_delete = False
                    else:
                        logger.warning("y/n please!")
                        keyin = None

            if _actual_delete:
                # Rename first so a failed removal leaves an obviously dead file
                remove_filename = _localFile + "_" + str(time.time()) + '__to_be_deleted_'

                try:
                    os.rename(_localFile, remove_filename)
                except OSError as err:
                    logger.warning("Error in first stage of removing file: %s" % remove_filename)
                    remove_filename = _localFile

                try:
                    os.remove(remove_filename)
                except OSError as err:
                    # A missing file is fine; anything else is re-raised
                    if err.errno != errno.ENOENT:
                        logger.error("Error in removing file: %s" % str(remove_filename))
                        raise

    return
def __repr__(self):
    """Get the representation of the file."""
    class_name = _getName(self)
    return "%s(namePattern='%s')" % (class_name, self.namePattern)
class TestMassStorageClient(GangaUnitTest):
    """Test client-side ('client' backendPostprocess) MassStorageFile handling.

    The tests run in order: a) submit, b) check completion/upload, c) copy the job.
    """

    # Temp files created during the tests; unlinked in tearDownClass
    _managed_files = []

    # Num of sj in tests
    sj_len = 3

    # Proxied file class under test
    fileClass = addProxy(MassStorageFile)

    # Where on local storage we want to have our 'MassStorage solution'
    # NOTE: overwritten with a tempfile.mkdtemp() path in setUpClass
    outputFilePath = '/tmp/Test' + _getName(fileClass) + 'Client'

    # This sets up a MassStorageConfiguration which works by placing a file on
    # local storage somewhere we can test using standard tools
    MassStorageTestConfig = {'defaultProtocol': 'file://',
                             'fileExtensions': [''],
                             'uploadOptions': {'path': outputFilePath,
                                               'cp_cmd': 'cp',
                                               'ls_cmd': 'ls',
                                               'mkdir_cmd': 'mkdir'},
                             'backendPostprocess': {'LSF': 'client', 'LCG': 'client',
                                                    'ARC': 'client', 'Dirac': 'client',
                                                    'PBS': 'client', 'Interactive': 'client',
                                                    'Local': 'client', 'CREAM': 'client'}}

    # Extension used for the generated test files
    _ext = '.root'

    def setUp(self):
        """ Configure the MassStorageFile for the test """
        extra_opts = [('PollThread', 'autostart', 'False'),
                      ('Local', 'remove_workdir', 'False'),
                      ('TestingFramework', 'AutoCleanup', 'False'),
                      ('Output', _getName(self.fileClass), TestMassStorageClient.MassStorageTestConfig),
                      ('Output', 'FailJobIfNoOutputMatched', 'True')]
        super(TestMassStorageClient, self).setUp(extra_opts=extra_opts)

    @staticmethod
    def cleanUp():
        """ Cleanup the current temp objects """
        from Ganga.GPI import jobs
        for j in jobs:
            shutil.rmtree(j.backend.workdir, ignore_errors=True)
            j.remove()

    @classmethod
    def setUpClass(cls):
        """ This creates a safe place to put the files into 'mass-storage' """
        cls.outputFilePath = tempfile.mkdtemp()
        cls.MassStorageTestConfig['uploadOptions']['path'] = cls.outputFilePath

    @classmethod
    def tearDownClass(cls):
        """ Cleanup the current temp objects """
        for file_ in cls._managed_files:
            os.unlink(file_)
        cls._managed_files = []
        shutil.rmtree(cls.outputFilePath, ignore_errors=True)

    def test_a_testClientSideSubmit(self):
        """Test the client side code whilst still using the Local backend"""
        MassStorageFile = self.fileClass
        from Ganga.GPI import LocalFile, Job, ArgSplitter

        TestMassStorageClient.cleanUp()

        assert getConfig('Output')[_getName(self.fileClass)]['backendPostprocess']['Local'] == 'client'

        file_1 = generate_unique_temp_file(TestMassStorageClient._ext)
        file_2 = generate_unique_temp_file(TestMassStorageClient._ext)
        TestMassStorageClient._managed_files.append(file_1)
        TestMassStorageClient._managed_files.append(file_2)

        j = Job()
        j.inputfiles = [LocalFile(file_1), LocalFile(file_2)]
        j.splitter = ArgSplitter(args = [[_] for _ in range(TestMassStorageClient.sj_len)])
        # wildcard pattern: should expand to both input files per subjob
        j.outputfiles = [MassStorageFile(namePattern='*'+TestMassStorageClient._ext)]
        j.submit()

    def test_b_testClientSideComplete(self):
        """Test the client side code whilst still using the Local backend"""
        from Ganga.GPI import jobs

        assert getConfig('Output')[_getName(self.fileClass)]['backendPostprocess']['Local'] == 'client'

        j = jobs[-1]

        assert sleep_until_completed(j)

        for sj in j.subjobs:
            output_dir = stripProxy(sj).getOutputWorkspace(create=False).getPath()
            assert os.path.isdir(output_dir) == True

            # Check that the files have been removed from the output worker dir
            for input_f in j.inputfiles:
                assert not os.path.isfile(os.path.join(output_dir, input_f.namePattern))

            # Check that the files were placed in the correct place on storage
            output_dir = os.path.join(self.outputFilePath, str(j.id), str(sj.id))
            for file_ in j.inputfiles:
                assert os.path.isfile(os.path.join(output_dir, file_.namePattern))

            # Check that wildcard expansion happened correctly
            assert len(stripProxy(stripProxy(sj).outputfiles[0]).subfiles) == 2
            assert len(sj.outputfiles) == 2

    def test_c_testCopyJob(self):
        """ Test copying a completed job with a wildcard in the outputfiles """
        from Ganga.GPI import jobs
        j = jobs[-1]
        j2 = j.copy()
        # the copy keeps the un-expanded wildcard pattern, not the expanded subfiles
        assert len(j2.outputfiles) == 1
        MassStorageFile = self.fileClass
        assert j2.outputfiles == [MassStorageFile(namePattern='*'+TestMassStorageClient._ext)]
        assert len(j2.inputfiles) == 2
        self.cleanUp()
def remove(self, force=False, removeLocal=False):
    """
    Removes the file from remote storage ONLY by default.

    Args:
        force (bool): delete every location without interactive confirmation
        removeLocal (bool): additionally remove the local copy of the file
    """
    massStorageConfig = getConfig('Output')[_getName(self)]['uploadOptions']
    rm_cmd = massStorageConfig['rm_cmd']

    # force implies deleting every location without prompting the user
    _auto_delete = (force == True)

    # iterate a copy: entries are removed from self.locations inside the loop
    for i in list(self.locations):

        if not _auto_delete:
            keyin = None
            while keyin is None:
                keyin = raw_input("Do you want to delete file %s at Location: %s ? [y/n] " % (str(self.namePattern), str(i)))
                if keyin.lower() == 'y':
                    _delete_this = True
                elif keyin.lower() == 'n':
                    _delete_this = False
                else:
                    logger.warning("y/n please!")
                    keyin = None
        else:
            _delete_this = True

        if _delete_this:
            # bug fix: the location was never substituted into the log message
            logger.info("Deleting File at Location: %s" % str(i))
            self.execSyscmdSubprocess('%s %s' % (rm_cmd, quote(i)))
            # bug fix: list.pop() takes an index; drop this entry by value
            self.locations.remove(i)

    if removeLocal:

        sourceDir = ''
        if self.localDir == '':
            _CWD = os.getcwd()
            if os.path.isfile(os.path.join(_CWD, self.namePattern)):
                sourceDir = _CWD
        else:
            sourceDir = self.localDir

        _localFile = os.path.join(sourceDir, self.namePattern)

        if os.path.isfile(_localFile):

            if force:
                _actual_delete = True
            else:
                keyin = None
                while keyin is None:
                    keyin = raw_input("Do you want to remove the local File: %s ? ([y]/n) " % str(_localFile))
                    if keyin.lower() in ['y', '']:
                        _actual_delete = True
                    elif keyin.lower() == 'n':
                        _actual_delete = False
                    else:
                        logger.warning("y/n please!")
                        keyin = None

            if _actual_delete:
                # two-stage delete: rename first so a failed unlink leaves an
                # obviously-dead file rather than a half-removed one
                remove_filename = _localFile + "_" + str(time.time()) + '__to_be_deleted_'

                try:
                    os.rename(_localFile, remove_filename)
                except OSError as err:
                    logger.warning("Error in first stage of removing file: %s" % remove_filename)
                    remove_filename = _localFile

                try:
                    os.remove(remove_filename)
                except OSError as err:
                    # a file that is already gone is fine; anything else is real
                    if err.errno != errno.ENOENT:
                        logger.error("Error in removing file: %s" % str(remove_filename))
                        raise

    return
class TestMassStorageWN(GangaUnitTest):
    """Testing MassStorage when completing a file via worker-node ('WN') upload.

    The Local backend is configured for 'WN' postprocessing, and the tests run
    in order, exercising the different outputfilenameformat variants below.
    """

    # Temp files created during the tests; unlinked in tearDownClass
    _managed_files = []

    # Num of sj in tests
    sj_len = 3

    # Proxied file class under test
    fileClass = addProxy(MassStorageFile)

    # Where on local storage we want to have our 'MassStorage solution'
    # NOTE: overwritten with a tempfile.mkdtemp() path in setUpClass
    outputFilePath = '/tmp/Test' + _getName(fileClass) + 'WN'

    # This sets up a MassStorageConfiguration which works by placing a file on
    # local storage somewhere we can test using standard tools
    MassStorageTestConfig = {'defaultProtocol': 'file://',
                             'fileExtensions': [''],
                             'uploadOptions': {'path': outputFilePath,
                                               'cp_cmd': 'cp',
                                               'ls_cmd': 'ls',
                                               'mkdir_cmd': 'mkdir'},
                             'backendPostprocess': {'LSF': 'WN', 'LCG': 'client',
                                                    'ARC': 'client', 'Dirac': 'client',
                                                    'PBS': 'WN', 'Interactive': 'client',
                                                    'Local': 'WN', 'CREAM': 'client'}}

    # outputfilenameformat variants exercised by the tests below
    standardFormat = '{jid}/{fname}'
    extendedFormat = '{jid}/{sjid}/{fname}'
    customOutputFormat = '{jid}_{sjid}_{fname}'

    def setUp(self):
        """ Configure the MassStorageFile for the test """
        extra_opts = [('PollThread', 'autostart', 'False'),
                      ('Local', 'remove_workdir', 'False'),
                      ('TestingFramework', 'AutoCleanup', 'False'),
                      ('Output', _getName(self.fileClass), TestMassStorageWN.MassStorageTestConfig),
                      ('Output', 'FailJobIfNoOutputMatched', 'True')]
        super(TestMassStorageWN, self).setUp(extra_opts=extra_opts)

    @staticmethod
    def cleanUp():
        """ Cleanup the current job objects """
        from Ganga.GPI import jobs
        for j in jobs:
            shutil.rmtree(j.backend.workdir, ignore_errors=True)
            j.remove()

    @classmethod
    def setUpClass(cls):
        """ This creates a safe place to put the files into 'mass-storage' """
        cls.outputFilePath = tempfile.mkdtemp()
        cls.MassStorageTestConfig['uploadOptions']['path'] = cls.outputFilePath

    @classmethod
    def tearDownClass(cls):
        """ Cleanup the current temp objects """
        for file_ in cls._managed_files:
            os.unlink(file_)
        cls._managed_files = []
        shutil.rmtree(cls.outputFilePath, ignore_errors=True)

    def test_a_Submit(self):
        """Test the ability to submit a job with some LocalFiles"""
        MassStorageFile = self.fileClass
        from Ganga.GPI import jobs, Job, LocalFile

        _ext = '.txt'
        file_1 = generate_unique_temp_file(_ext)
        TestMassStorageWN._managed_files.append(file_1)

        j = Job()
        j.inputfiles = [LocalFile(file_1)]
        j.outputfiles = [MassStorageFile(namePattern='*'+_ext, outputfilenameformat=self.standardFormat)]
        j.submit()

        # the format string must survive submission unchanged
        for f in j.outputfiles:
            assert f.outputfilenameformat == self.standardFormat

    def test_b_Completed(self):
        """Test the job completed and the output files exist 'in storage'"""
        from Ganga.GPI import jobs

        j = jobs[-1]

        assert sleep_until_completed(j)

        # Check that we've still got 1 file everywhere we expect 1
        assert len(j.inputfiles) == 1
        assert len(j.outputfiles) == 1
        # 1 file after wildcard expansion
        assert len(stripProxy(stripProxy(j).outputfiles[0]).subfiles) == 1
        assert len(j.outputfiles) == 1

        # Test that these strings are sensible
        assert j.outputfiles[0].namePattern != ''
        assert j.outputfiles[0].namePattern[0] != '*'
        assert j.outputfiles[0].locations != ['']
        assert isinstance(j.outputfiles[0].locations[0], str) is True
        assert j.outputfiles[0].accessURL() != ['']
        assert isinstance(j.outputfiles[0].accessURL()[0], str) is True

        # Check that the output file exists on 'storage'
        output_dir = os.path.join(self.outputFilePath, str(j.id))
        assert os.path.isdir(output_dir)
        assert os.path.isfile(os.path.join(output_dir, j.inputfiles[0].namePattern))

        self.cleanUp()

    def test_c_SplitJob(self):
        """Test submitting subjobs"""
        MassStorageFile = self.fileClass
        from Ganga.GPI import Job, LocalFile, ArgSplitter

        _ext = '.txt2'
        file_1 = generate_unique_temp_file(_ext)
        TestMassStorageWN._managed_files.append(file_1)

        j = Job()
        j.inputfiles = [LocalFile(file_1)]
        j.splitter = ArgSplitter(args = [[_] for _ in range(0, TestMassStorageWN.sj_len) ])
        # split job, so {sjid} must appear in the format
        j.outputfiles = [MassStorageFile(namePattern='*'+_ext, outputfilenameformat=self.extendedFormat)]
        j.submit()

        for f in j.outputfiles:
            assert f.outputfilenameformat == self.extendedFormat

    def test_d_CompletedSJ(self):
        """Test that the subjobs have completed"""
        from Ganga.GPI import jobs

        j = jobs[-1]

        assert sleep_until_completed(j)

        assert len(j.subjobs) == TestMassStorageWN.sj_len

        assert len(stripProxy(stripProxy(j.subjobs[0]).outputfiles[0]).subfiles) == 1
        assert len(j.subjobs[0].outputfiles) == 1

        for i in range(0, TestMassStorageWN.sj_len):
            # extendedFormat places files under <path>/<jid>/<sjid>/
            output_dir = os.path.join(self.outputFilePath, str(j.id), str(i))
            assert os.path.isdir(output_dir)
            # Check each inputfile has been placed in storage like we asked
            for _input_file in j.inputfiles:
                assert os.path.isfile(os.path.join(output_dir, _input_file.namePattern))

        self.cleanUp()

    def test_e_MultipleFiles(self):
        """Test that the wildcards work"""
        MassStorageFile = self.fileClass
        from Ganga.GPI import LocalFile, Job, ArgSplitter

        _ext = '.root'
        _ext2 = '.txt'
        file_1 = generate_unique_temp_file(_ext)
        file_2 = generate_unique_temp_file(_ext)
        file_3 = generate_unique_temp_file(_ext2)
        TestMassStorageWN._managed_files.append(file_1)
        TestMassStorageWN._managed_files.append(file_2)
        TestMassStorageWN._managed_files.append(file_3)

        j = Job()
        j.inputfiles = [LocalFile(file_1), LocalFile(file_2), LocalFile(file_3)]
        j.splitter = ArgSplitter(args = [[_] for _ in range(0, TestMassStorageWN.sj_len) ])
        # one patterned output with an explicit format, one without
        j.outputfiles = [MassStorageFile(namePattern='*'+_ext, outputfilenameformat='{jid}/{sjid}/{fname}'),
                         MassStorageFile(namePattern='*'+_ext2)]
        j.submit()

    def test_f_MultiUpload(self):
        """Test that multiple 'uploads' work"""
        from Ganga.GPI import jobs

        j = jobs[-1]

        assert sleep_until_completed(j)

        assert len(j.subjobs) == TestMassStorageWN.sj_len

        for i in range(TestMassStorageWN.sj_len):
            # Check that the subfiles were expanded correctly
            assert len(stripProxy(stripProxy(j.subjobs[i]).outputfiles[0]).subfiles) == 2
            assert len(stripProxy(stripProxy(j.subjobs[i]).outputfiles[1]).subfiles) == 1
            # Check we have the correct total number of files
            assert len(j.subjobs[i].outputfiles) == 3

            output_dir = os.path.join(self.outputFilePath, str(j.id), str(i))
            assert os.path.isdir(output_dir)

            # Check all of the files were put into storage
            for file_ in j.inputfiles:
                assert os.path.isfile(os.path.join(output_dir, file_.namePattern))

        self.cleanUp()

    def test_g_MultipleFiles(self):
        """Test that the wildcards work"""
        MassStorageFile = self.fileClass
        from Ganga.GPI import LocalFile, Job, ArgSplitter

        _ext = '.root'
        file_1 = generate_unique_temp_file(_ext)
        file_2 = generate_unique_temp_file(_ext)
        TestMassStorageWN._managed_files.append(file_1)
        TestMassStorageWN._managed_files.append(file_2)

        j = Job()
        j.inputfiles = [LocalFile(file_1), LocalFile(file_2)]
        j.splitter = ArgSplitter(args = [[_] for _ in range(0, TestMassStorageWN.sj_len) ])
        j.outputfiles = [MassStorageFile(namePattern='*'+_ext, outputfilenameformat=self.customOutputFormat)]

        # check the format both before and after submission
        for f in j.outputfiles:
            assert f.outputfilenameformat == self.customOutputFormat

        j.submit()

        for f in j.outputfiles:
            assert f.outputfilenameformat == self.customOutputFormat

    def test_h_MultiUpload(self):
        """Test that multiple 'uploads' work"""
        from Ganga.GPI import jobs

        j = jobs[-1]

        assert sleep_until_completed(j)

        assert len(j.subjobs) == TestMassStorageWN.sj_len

        for i in range(TestMassStorageWN.sj_len):
            # Check that we correctly have expanded the wildcard still
            assert len(stripProxy(stripProxy(j.subjobs[i]).outputfiles[0]).subfiles) == 2
            assert len(j.subjobs[i].outputfiles) == 2

            # customOutputFormat flattens files into '<jid>_<sjid>_<fname>' names
            file_prep = os.path.join(self.outputFilePath, str(j.id) + '_' + str(i) + '_')

            # Check that the files were placed in the correct place on storage
            print("Found: %s" % str(os.listdir(self.outputFilePath)))
            for f in j.subjobs[i].outputfiles:
                assert f.outputfilenameformat == self.customOutputFormat
            for file_ in j.inputfiles:
                assert os.path.isfile(file_prep + file_.namePattern)

        self.cleanUp()
def _checkConfig(self):
    """Ensure a mass-storage upload path has been configured for this file type."""
    uploadOptions = getConfig('Output')[_getName(self)]['uploadOptions']
    if not uploadOptions['path']:
        raise GangaException('Unable to create MassStorageFile. Check your configuration!')
def getName(_obj):
    """Return the name of *_obj* after removing any GPI proxy wrapper."""
    return _getName(stripProxy(_obj))
def name(self):
    """Return the name of the wrapped plugin class."""
    # local import to avoid a circular dependency at module load time
    from Ganga.GPIDev.Base.Objects import _getName
    plugin_class = self._pluginclass
    return _getName(plugin_class)
class TestMassStorageClientInput(GangaUnitTest):
    """Testing MassStorage on input to a job.

    MassStorageFile objects are 'put' onto storage first, then used as job
    inputfiles; test b checks they were delivered to the worker node area.
    """

    # Temp files created during the tests; unlinked in tearDownClass
    _managed_files = []

    # Num of sj in tests
    sj_len = 3

    # Proxied file class under test
    fileClass = addProxy(MassStorageFile)

    # Where on local storage we want to have our 'MassStorage solution'
    # NOTE: overwritten with a tempfile.mkdtemp() path in setUpClass
    outputFilePath = '/tmp/Test' + _getName(fileClass) + 'Input'

    # This sets up a MassStorageConfiguration which works by placing a file on
    # local storage somewhere we can test using standard tools
    MassStorageTestConfig = {
        'defaultProtocol': 'file://',
        'fileExtensions': [''],
        'uploadOptions': {
            'path': outputFilePath,
            'cp_cmd': 'cp',
            'ls_cmd': 'ls',
            'mkdir_cmd': 'mkdir -p'
        },
        'backendPostprocess': {
            'LSF': 'client',
            'LCG': 'client',
            'ARC': 'client',
            'Dirac': 'client',
            'PBS': 'client',
            'Interactive': 'client',
            'Local': 'client',
            'CREAM': 'client'
        }
    }

    def setUp(self):
        """ Configure the MassStorageFile for the test """
        extra_opts = [('PollThread', 'autostart', 'False'),
                      ('Local', 'remove_workdir', 'False'),
                      ('TestingFramework', 'AutoCleanup', 'False'),
                      ('Output', _getName(self.fileClass), self.MassStorageTestConfig),
                      ('Output', 'FailJobIfNoOutputMatched', 'True')]
        super(TestMassStorageClientInput, self).setUp(extra_opts=extra_opts)

    @staticmethod
    def cleanUp():
        """ Cleanup the current job objects """
        from Ganga.GPI import jobs
        for j in jobs:
            shutil.rmtree(j.backend.workdir, ignore_errors=True)
            j.remove()

    @classmethod
    def setUpClass(cls):
        """ This creates a safe place to put the files into 'mass-storage' """
        cls.outputFilePath = tempfile.mkdtemp()
        cls.MassStorageTestConfig['uploadOptions']['path'] = cls.outputFilePath

    @classmethod
    def tearDownClass(cls):
        """ Cleanup the current temp objects """
        for file_ in cls._managed_files:
            os.unlink(file_)
        cls._managed_files = []
        shutil.rmtree(cls.outputFilePath, ignore_errors=True)

    def test_a_testClientInputSubmit(self):
        """Test that a job can be submitted with inputfiles in the input"""
        MassStorageFile = self.fileClass
        from Ganga.GPI import LocalFile, Job, ArgSplitter

        _ext = '.root'
        file_1 = generate_unique_temp_file(_ext)
        file_2 = generate_unique_temp_file(_ext)
        self._managed_files.append(file_1)
        self._managed_files.append(file_2)

        # upload the files to 'mass storage' before using them as job input
        msf_1 = MassStorageFile(file_1)
        msf_2 = MassStorageFile(file_2)
        msf_1.put()
        msf_2.put()

        j = Job()
        j.inputfiles = [msf_1, msf_2]
        j.splitter = ArgSplitter(args=[[_] for _ in range(self.sj_len)])
        j.outputfiles = [LocalFile(namePattern='*' + _ext)]
        j.submit()

    def test_b_testClientInputComplete(self):
        """Test that the files were made accessible to the WN area and collected as LocalFile objects in outputfiles"""
        from Ganga.GPI import jobs

        j = jobs[-1]

        assert sleep_until_completed(j)

        for sj in j.subjobs:
            for file_ in j.inputfiles:
                assert os.path.isfile(os.path.join(sj.outputdir, file_.namePattern))

        self.cleanUp()