def getFile(lfn, se=""): dm = DataManager() download_ok = 0 get_active_replicas_ok = False lfn_on_se = False error_msg = "" if se: for i in range(0, 5): result = dm.getActiveReplicas(lfn) if result["OK"] and result["Value"]["Successful"]: get_active_replicas_ok = True lfnReplicas = result["Value"]["Successful"] if se in lfnReplicas[lfn]: lfn_on_se = True break time.sleep(3) print "- Get replicas for %s failed, try again" % lfn if not get_active_replicas_ok: return S_ERROR("Get replicas error: %s" % lfn) if lfn_on_se: se = StorageElement(se) # try 5 times for j in range(0, 5): result = se.getFile(lfn) if result["OK"] and result["Value"]["Successful"] and result["Value"]["Successful"].has_key(lfn): break time.sleep(random.randint(180, 600)) print "- %s getStorageFile(%s) failed, try again" % (lfn, se) if result["OK"]: if result["Value"]["Successful"] and result["Value"]["Successful"].has_key(lfn): download_ok = 1 else: error_msg = "Downloading %s from SE %s error!" % (lfn, se) else: error_msg = result["Message"] else: if se: print 'File %s not found on SE "%s" after %s tries, trying other SE' % (lfn, se, i + 1) # try 5 times for j in range(0, 5): result = dm.getFile(lfn) if result["OK"] and result["Value"]["Successful"] and result["Value"]["Successful"].has_key(lfn): break time.sleep(random.randint(180, 600)) print "- getFile(%s) failed, try again" % lfn if result["OK"]: if result["Value"]["Successful"] and result["Value"]["Successful"].has_key(lfn): download_ok = 2 else: error_msg = "Downloading %s from random SE error!" % lfn else: error_msg = result["Message"] if download_ok: return S_OK({lfn: {"DownloadOK": download_ok, "Retry": j + 1}}) return S_ERROR(error_msg)
def pushNewResults(): #cd to temp folder to temporary save the zip files os.chdir(temp_save_path) logger.warning('Checking results directory for new added zip files...') from DIRAC.Core.Base.Script import initialize #from DIRAC import gLogger #gLogger.setLevel("DEBUG") initialize(ignoreErrors = True, enableCommandLine = False) from DIRAC.Resources.Storage.StorageElement import StorageElement statSE = StorageElement(diracStorageElementName) #print diracStorageElementFolder print "Before listDirectory" dirDict = statSE.listDirectory(diracStorageElementFolder) print "After listDirectory" print dirDict for zipResult in dirDict['Value']['Successful'][diracStorageElementFolder]['Files']: fileName, fileExtension = os.path.splitext(zipResult) #get the File, copy the file to the current local directory res = statSE.getFile(os.path.join(diracStorageElementFolder, zipResult)) if not res['OK'] or ( res['OK'] and len(res['Value']['Failed']) > 0): logger.errot("Failed download of " + zipResult) continue results_list = AddedResults.objects.filter(identifier__exact=fileName) res = True if not results_list: logger.info('New zip: {0}, found in results directory, calling pushZip command...'.format(zipResult)) res = pushZip.pushThis(os.path.join(temp_save_path, zipResult )) if not res: logger.error("Error pushing results, not removing") continue #remove it from the upload_test folder statSE.removeFile(os.path.join(diracStorageElementFolder, zipResult)) #put the file into the added folder statSE.putFile({ os.path.join(addedDiracStorageFolder, zipResult) : zipResult}) #also remove the file from the current directory os.remove(os.path.join(temp_save_path, zipResult))
# NOTE(review): fragment — tail of a prepareFile-style method (its `def` is
# outside this chunk), followed by the start of the next exported method.
if os.path.exists(getFileDir):
    try:
        shutil.rmtree(getFileDir)
        gLogger.debug("Cleared existing getFile cache")
    except Exception, x:
        # Best effort: log and keep going; the mkdir below will then fail loudly
        gLogger.exception("Failed to remove destination directory.", getFileDir, x )
os.mkdir(getFileDir)
# Get the file to the cache
try:
    storageElement = StorageElement(se)
except AttributeError, x:
    errStr = "prepareFile: Exception while instantiating the Storage Element."
    gLogger.exception( errStr, se, str(x) )
    return S_ERROR(errStr)
# Positional args: (pfn, localPath, singleFile) — presumably matches this
# DIRAC version's StorageElement.getFile signature; TODO confirm
res = storageElement.getFile( pfn, "%s/getFile" % BASE_PATH, True )
if not res['OK']:
    gLogger.error( "prepareFile: Failed to get local copy of file.", res['Message'] )
    return res
return S_OK()

# RPC argument type signature: `lfn` may be a string or a list
types_prepareFileForHTTP = [ list(StringTypes)+[ListType] ]

def export_prepareFileForHTTP(self, lfn):
    """ This method simply gets the file to the local storage area using LFN """
    # Do clean-up, should be a separate regular thread
    gRegister.purgeExpired()
    # Random 128-bit token identifies this transfer in the cache register
    key = str( random.getrandbits( 128 ) )
    # 300 s timeout for the wrapped call (result handling is outside this chunk)
    result = pythonCall( 300, self.__prepareFileForHTTP, lfn, key )
# NOTE(review): fragment — tail of a prepareFile-style method (its `def` is
# outside this chunk), followed by the start of the next exported method.
if os.path.exists(getFileDir):
    try:
        shutil.rmtree(getFileDir)
        gLogger.debug("Cleared existing getFile cache")
    except Exception, x:
        # Best effort: log and keep going; the mkdir below will then fail loudly
        gLogger.exception("Failed to remove destination directory.", getFileDir, x )
os.mkdir(getFileDir)
# Get the file to the cache
try:
    storageElement = StorageElement(se)
except AttributeError, x:
    errStr = "prepareFile: Exception while instantiating the Storage Element."
    gLogger.exception( errStr, se, str(x) )
    return S_ERROR(errStr)
# returnSingleResult collapses the bulk Successful/Failed dict for one file
res = returnSingleResult( storageElement.getFile( pfn, localPath = "%s/getFile" % BASE_PATH ) )
if not res['OK']:
    gLogger.error( "prepareFile: Failed to get local copy of file.", res['Message'] )
    return res
return S_OK()

# RPC argument type signature: `lfn` may be a string or a list
types_prepareFileForHTTP = [ list(StringTypes)+[ListType] ]

def export_prepareFileForHTTP(self, lfn):
    """ This method simply gets the file to the local storage area using LFN """
    # Do clean-up, should be a separate regular thread
    gRegister.purgeExpired()
    # Random 128-bit token identifies this transfer in the cache register
    key = str( random.getrandbits( 128 ) )
    # 300 s timeout for the wrapped call (result handling is outside this chunk)
    result = pythonCall( 300, self.__prepareFileForHTTP, lfn, key )
# If its defined ensure the dir structure is there if not destinationDir: destinationDir = os.getcwd() else: try: os.makedirs( destinationDir ) except: pass try: tmpSBDir = tempfile.mkdtemp( prefix = "TMSB." ) except Exception, e: return S_ERROR( "Cannot create temporal file: %s" % str( e ) ) se = StorageElement( SEName ) result = returnSingleResult( se.getFile( SEPFN, localPath = tmpSBDir ) ) if not result[ 'OK' ]: return result sbFileName = os.path.basename( SEPFN ) result = S_OK() tarFileName = os.path.join( tmpSBDir, sbFileName ) if inMemory: try: tfile = open( tarFileName, 'r' ) data = tfile.read() tfile.close() os.unlink( tarFileName ) os.rmdir( tmpSBDir )
def downloadSandbox(self, sbLocation, destinationDir="", inMemory=False, unpack=True):
    """Download a sandbox archive from its Storage Element.

    :param sbLocation: sandbox URL of the form "SB:<SEName>|<PFN>"
    :param destinationDir: directory to unpack into (cwd when empty)
    :param inMemory: when True, return the raw archive bytes instead of
                     unpacking; temporary files are removed either way
    :param unpack: when False, return the path of the downloaded tarball
    :return: S_OK with the archive data (inMemory), the tar file path
             (unpack=False) or the unpacked size in bytes; S_ERROR otherwise
    """
    if sbLocation.find("SB:") != 0:
        return S_ERROR("Invalid sandbox URL")
    sbLocation = sbLocation[3:]
    sbSplit = sbLocation.split("|")
    if len(sbSplit) < 2:
        return S_ERROR("Invalid sandbox URL")
    seName = sbSplit[0]
    # The PFN itself may contain '|' characters, so re-join the remainder
    sePFN = "|".join(sbSplit[1:])
    try:
        tmpSBDir = tempfile.mkdtemp(prefix="TMSB.")
    except IOError as e:
        return S_ERROR("Cannot create temporary file: %s" % repr(e))
    se = StorageElement(seName, vo=self.__vo)
    result = returnSingleResult(se.getFile(sePFN, localPath=tmpSBDir))
    if not result['OK']:
        return result
    sbFileName = os.path.basename(sePFN)
    result = S_OK()
    tarFileName = os.path.join(tmpSBDir, sbFileName)
    if inMemory:
        try:
            # BUGFIX: the archive is binary — open in 'rb'. Text mode 'r'
            # raises UnicodeDecodeError on Python 3 (and corrupts data on
            # platforms with newline translation).
            with open(tarFileName, 'rb') as tfile:
                data = tfile.read()
        except IOError as e:
            return S_ERROR('Failed to read the sandbox archive: %s' % repr(e))
        finally:
            # Remove the temporary download whether or not the read worked
            os.unlink(tarFileName)
            os.rmdir(tmpSBDir)
        return S_OK(data)
    # If destination dir is not specified use current working dir
    # If its defined ensure the dir structure is there
    if not destinationDir:
        destinationDir = os.getcwd()
    else:
        mkDir(destinationDir)
    if not unpack:
        result['Value'] = tarFileName
        return result
    try:
        sandboxSize = 0
        with tarfile.open(name=tarFileName, mode="r") as tf:
            for tarinfo in tf:
                tf.extract(tarinfo, path=destinationDir)
                sandboxSize += tarinfo.size
        # FIXME: here we return the size, but otherwise we always return the location: inconsistent
        # FIXME: looks like this size is used by the JobWrapper
        result['Value'] = sandboxSize
    except IOError as e:
        # NOTE(review): a corrupt archive raises tarfile.TarError, which is
        # not an IOError and would propagate — TODO confirm that is intended
        result = S_ERROR("Could not open bundle: %s" % repr(e))
    try:
        os.unlink(tarFileName)
        os.rmdir(tmpSBDir)
    except OSError as e:
        gLogger.warn("Could not remove temporary dir %s: %s" % (tmpSBDir, repr(e)))
    return result
getFileDir = "%s/getFile" % base_path if os.path.exists(getFileDir): try: shutil.rmtree(getFileDir) gLogger.debug("Cleared existing getFile cache") except Exception, x: gLogger.exception("Failed to remove destination directory.",getFileDir,x) # Get the file to the cache try: storageElement = StorageElement(se) except AttributeError, x: errStr = "prepareFile: Exception while instantiating the Storage Element." gLogger.exception(errStr,se,str(x)) return S_ERROR(errStr) res = storageElement.getFile(pfn,"%s/getFile" % base_path,True) if not res['OK']: gLogger.error("prepareFile: Failed to get local copy of file.",res['Message']) return res return S_OK() types_prepareFileForHTTP = [ list(StringTypes)+[ListType] ] def export_prepareFileForHTTP(self, lfn): """ This method simply gets the file to the local storage area using LFN """ # Do clean-up, should be a separate regular thread gRegister.purgeExpired() key = str( random.getrandbits( 128 ) ) result = pythonCall( 0,self.__prepareFileForHTTP,lfn,key )
def downloadSandbox( self, sbLocation, destinationDir = "", inMemory = False, unpack = True ):
    """ Download a sandbox file and keep it in bundled form

        :param sbLocation: sandbox URL of the form "SB:<SEName>|<PFN>"
        :param destinationDir: directory to unpack into (cwd when empty)
        :param inMemory: when True, return the raw archive bytes instead of
                         unpacking; temporary files are removed either way
        :param unpack: when False, return the path of the downloaded tarball
        :return: S_OK with the archive data (inMemory), the tar file path
                 (unpack=False) or the unpacked size in bytes; S_ERROR otherwise
    """
    if sbLocation.find( "SB:" ) != 0:
      return S_ERROR( "Invalid sandbox URL" )
    sbLocation = sbLocation[ 3: ]
    sbSplit = sbLocation.split( "|" )
    if len( sbSplit ) < 2:
      return S_ERROR( "Invalid sandbox URL" )
    SEName = sbSplit[0]
    # The PFN itself may contain '|' characters, so re-join the remainder
    SEPFN = "|".join( sbSplit[1:] )
    # If destination dir is not specified use current working dir
    # If its defined ensure the dir structure is there
    if not destinationDir:
      destinationDir = os.getcwd()
    else:
      mkDir(destinationDir)
    try:
      tmpSBDir = tempfile.mkdtemp( prefix = "TMSB." )
    except Exception as e:
      # BUGFIX: message said "temporal file"
      return S_ERROR( "Cannot create temporary file: %s" % str( e ) )
    se = StorageElement( SEName, vo = self.__vo )
    result = returnSingleResult( se.getFile( SEPFN, localPath = tmpSBDir ) )
    if not result[ 'OK' ]:
      return result
    sbFileName = os.path.basename( SEPFN )
    result = S_OK()
    tarFileName = os.path.join( tmpSBDir, sbFileName )
    if inMemory:
      try:
        # BUGFIX: the archive is binary — open in 'rb'; text mode breaks on
        # Python 3 and on platforms with newline translation
        tfile = open( tarFileName, 'rb' )
        data = tfile.read()
        tfile.close()
        os.unlink( tarFileName )
        os.rmdir( tmpSBDir )
      except Exception as e:
        # Clean up the partial download before reporting the failure
        os.unlink( tarFileName )
        os.rmdir( tmpSBDir )
        return S_ERROR( 'Failed to read the sandbox archive: %s' % str( e ) )
      return S_OK( data )
    if not unpack:
      result[ 'Value' ] = tarFileName
      return result
    try:
      sandboxSize = 0
      tf = tarfile.open( name = tarFileName, mode = "r" )
      for tarinfo in tf:
        tf.extract( tarinfo, path = destinationDir )
        sandboxSize += tarinfo.size
      tf.close()
      # The unpacked size in bytes is returned instead of the location here
      result[ 'Value' ] = sandboxSize
    except Exception as e:
      result = S_ERROR( "Could not open bundle: %s" % str( e ) )
    # Always remove the temporary download, success or not
    try:
      os.unlink( tarFileName )
      os.rmdir( tmpSBDir )
    except Exception as e:
      gLogger.warn( "Could not remove temporary dir %s: %s" % ( tmpSBDir, str( e ) ) )
    return result
def downloadSandbox(self, sbLocation, destinationDir="", inMemory=False, unpack=True):
    """Download a sandbox file and keep it in bundled form.

    :param sbLocation: sandbox URL of the form "SB:<SEName>|<PFN>"
    :param destinationDir: directory to unpack into (cwd when empty)
    :param inMemory: when True, return the raw archive bytes instead of
                     unpacking; temporary files are removed either way
    :param unpack: when False, return the path of the downloaded tarball
    :return: S_OK with the archive bytes (inMemory), the tar file path
             (unpack=False) or the unpacked size in bytes; S_ERROR otherwise
    """
    if sbLocation.find("SB:") != 0:
        return S_ERROR("Invalid sandbox URL")
    sbLocation = sbLocation[3:]
    sbSplit = sbLocation.split("|")
    if len(sbSplit) < 2:
        return S_ERROR("Invalid sandbox URL")
    seName = sbSplit[0]
    # The PFN itself may contain '|' characters, so re-join the remainder
    sePFN = "|".join(sbSplit[1:])
    try:
        tmpSBDir = tempfile.mkdtemp(prefix="TMSB.")
    except IOError as e:
        return S_ERROR("Cannot create temporary file: %s" % repr(e))
    se = StorageElement(seName, vo=self.__vo)
    result = returnSingleResult(se.getFile(sePFN, localPath=tmpSBDir))
    if not result["OK"]:
        return result
    sbFileName = os.path.basename(sePFN)
    result = S_OK()
    tarFileName = os.path.join(tmpSBDir, sbFileName)
    if inMemory:
        try:
            # Binary read of the whole archive; the finally block removes the
            # temporary files whether or not the read succeeded
            with open(tarFileName, "rb") as tfile:
                data = tfile.read()
        except IOError as e:
            return S_ERROR("Failed to read the sandbox archive: %s" % repr(e))
        finally:
            os.unlink(tarFileName)
            os.rmdir(tmpSBDir)
        return S_OK(data)
    # If destination dir is not specified use current working dir
    # If its defined ensure the dir structure is there
    if not destinationDir:
        destinationDir = os.getcwd()
    else:
        mkDir(destinationDir)
    if not unpack:
        # Hand back the downloaded tarball location without extracting it
        result["Value"] = tarFileName
        return result
    try:
        sandboxSize = 0
        with tarfile.open(name=tarFileName, mode="r") as tf:
            for tarinfo in tf:
                tf.extract(tarinfo, path=destinationDir)
                sandboxSize += tarinfo.size
        # FIXME: here we return the size, but otherwise we always return the location: inconsistent
        # FIXME: looks like this size is used by the JobWrapper
        result["Value"] = sandboxSize
    except IOError as e:
        # NOTE(review): tarfile.TarError is not an IOError and would
        # propagate here — TODO confirm that is intended
        result = S_ERROR("Could not open bundle: %s" % repr(e))
    # Best-effort removal of the temporary download; never fails the call
    try:
        os.unlink(tarFileName)
        os.rmdir(tmpSBDir)
    except OSError as e:
        gLogger.warn("Could not remove temporary dir %s: %s" % (tmpSBDir, repr(e)))
    return result
# NOTE(review): fragment — tail of a prepareFile-style method (its `def` is
# outside this chunk), followed by the start of the next exported method.
if os.path.exists(getFileDir):
    try:
        shutil.rmtree(getFileDir)
        gLogger.debug("Cleared existing getFile cache")
    except Exception, x:
        # Best effort: log and keep going; the mkdir below will then fail loudly
        gLogger.exception("Failed to remove destination directory.", getFileDir, x )
os.mkdir(getFileDir)
# Get the file to the cache
try:
    storageElement = StorageElement(se)
except AttributeError, x:
    errStr = "prepareFile: Exception while instantiating the Storage Element."
    gLogger.exception( errStr, se, str(x) )
    return S_ERROR(errStr)
# singleFile=True collapses the bulk Successful/Failed result for one file
res = storageElement.getFile( pfn, localPath = "%s/getFile" % BASE_PATH, singleFile = True )
if not res['OK']:
    gLogger.error( "prepareFile: Failed to get local copy of file.", res['Message'] )
    return res
return S_OK()

# RPC argument type signature: `lfn` may be a string or a list
types_prepareFileForHTTP = [ list(StringTypes)+[ListType] ]

def export_prepareFileForHTTP(self, lfn):
    """ This method simply gets the file to the local storage area using LFN """
    # Do clean-up, should be a separate regular thread
    gRegister.purgeExpired()
    # Random 128-bit token identifies this transfer in the cache register
    key = str( random.getrandbits( 128 ) )
    # 300 s timeout for the wrapped call (result handling is outside this chunk)
    result = pythonCall( 300, self.__prepareFileForHTTP, lfn, key )
def downloadSandbox(self, sbLocation, destinationDir="", inMemory=False, unpack=True):
    """Download a sandbox file and keep it in bundled form.

    :param sbLocation: sandbox URL of the form "SB:<SEName>|<PFN>"
    :param destinationDir: directory to unpack into (cwd when empty)
    :param inMemory: when True, return the raw archive bytes instead of
                     unpacking; temporary files are removed either way
    :param unpack: when False, return the path of the downloaded tarball
    :return: S_OK with the archive data (inMemory), the tar file path
             (unpack=False) or the unpacked size in bytes; S_ERROR otherwise
    """
    if sbLocation.find("SB:") != 0:
        return S_ERROR("Invalid sandbox URL")
    sbLocation = sbLocation[3:]
    sbSplit = sbLocation.split("|")
    if len(sbSplit) < 2:
        return S_ERROR("Invalid sandbox URL")
    SEName = sbSplit[0]
    # The PFN itself may contain '|' characters, so re-join the remainder
    SEPFN = "|".join(sbSplit[1:])
    try:
        tmpSBDir = tempfile.mkdtemp(prefix="TMSB.")
    except Exception as e:
        return S_ERROR("Cannot create temporal file: %s" % str(e))
    se = StorageElement(SEName, vo=self.__vo)
    result = returnSingleResult(se.getFile(SEPFN, localPath=tmpSBDir))
    if not result['OK']:
        return result
    sbFileName = os.path.basename(SEPFN)
    result = S_OK()
    tarFileName = os.path.join(tmpSBDir, sbFileName)
    if inMemory:
        try:
            # BUGFIX: the archive is binary — open in 'rb'. Text mode 'r'
            # raises UnicodeDecodeError on Python 3 and corrupts data on
            # platforms with newline translation.
            tfile = open(tarFileName, 'rb')
            data = tfile.read()
            tfile.close()
            os.unlink(tarFileName)
            os.rmdir(tmpSBDir)
        except Exception as e:
            # Clean up the partial download before reporting the failure
            os.unlink(tarFileName)
            os.rmdir(tmpSBDir)
            return S_ERROR('Failed to read the sandbox archive: %s' % str(e))
        return S_OK(data)
    # If destination dir is not specified use current working dir
    # If its defined ensure the dir structure is there
    if not destinationDir:
        destinationDir = os.getcwd()
    else:
        mkDir(destinationDir)
    if not unpack:
        result['Value'] = tarFileName
        return result
    try:
        sandboxSize = 0
        tf = tarfile.open(name=tarFileName, mode="r")
        for tarinfo in tf:
            tf.extract(tarinfo, path=destinationDir)
            sandboxSize += tarinfo.size
        tf.close()
        # The unpacked size in bytes is returned instead of the location here
        result['Value'] = sandboxSize
    except Exception as e:
        result = S_ERROR("Could not open bundle: %s" % str(e))
    # Best-effort removal of the temporary download; never fails the call
    try:
        os.unlink(tarFileName)
        os.rmdir(tmpSBDir)
    except Exception as e:
        gLogger.warn("Could not remove temporary dir %s: %s" % (tmpSBDir, str(e)))
    return result
getFileDir = "%s/getFile" % base_path if os.path.exists(getFileDir): try: shutil.rmtree(getFileDir) gLogger.debug("Cleared existing getFile cache") except Exception, x: gLogger.exception("Failed to remove destination directory.",getFileDir,x) # Get the file to the cache try: storageElement = StorageElement(se) except AttributeError, x: errStr = "prepareFile: Exception while instantiating the Storage Element." gLogger.exception(errStr,se,str(x)) return S_ERROR(errStr) res = storageElement.getFile(pfn,"%s/getFile" % base_path,True) if not res['OK']: gLogger.error("prepareFile: Failed to get local copy of file.",res['Message']) return res return S_OK() ############################################################ # # This is the method to setup the proxy and configure the environment with the client credential # def __prepareSecurityDetails(self): """ Obtains the connection details for the client """ try: clientDN = self._clientTransport.peerCredentials['DN']
# NOTE(review): fragment — tail of a prepareFile-style method (both the
# enclosing `def` and the preceding existence check are outside this chunk),
# followed by the truncated start of the next exported method.
try:
    shutil.rmtree(getFileDir)
    gLogger.debug("Cleared existing getFile cache")
except Exception, x:
    # Best effort: log and keep going; the mkdir below will then fail loudly
    gLogger.exception("Failed to remove destination directory.", getFileDir, x)
os.mkdir(getFileDir)
# Get the file to the cache
try:
    storageElement = StorageElement(se)
except AttributeError, x:
    errStr = "prepareFile: Exception while instantiating the Storage Element."
    gLogger.exception(errStr, se, str(x))
    return S_ERROR(errStr)
# Positional args: (pfn, localPath, singleFile) — presumably matches this
# DIRAC version's StorageElement.getFile signature; TODO confirm
res = storageElement.getFile(pfn, "%s/getFile" % BASE_PATH, True)
if not res['OK']:
    gLogger.error("prepareFile: Failed to get local copy of file.", res['Message'])
    return res
return S_OK()

# RPC argument type signature: `lfn` may be a string or a list
types_prepareFileForHTTP = [list(StringTypes) + [ListType]]

def export_prepareFileForHTTP(self, lfn):
    """ This method simply gets the file to the local storage area using LFN """
    # Do clean-up, should be a separate regular thread
    # (fragment ends here; the rest of this method is outside this chunk)
    gRegister.purgeExpired()