def putFile( self, path, sourceSize = 0 ):
  """ Put local file(s) to the storage element via the proxy server.

      :param path: path(s) in the dict format { dest_url : src_file }
      :param sourceSize: if non-zero, the client-provided source size, which
                         signals a replication request that is delegated
                         entirely to the proxy server
      :return: S_OK( {'Failed' : ..., 'Successful' : ...} ) keyed by dest_url;
               successful values are the remote file sizes
  """
  client = RPCClient( self.url )
  if sourceSize:
    gLogger.debug( "ProxyStorage.putFile: The client has provided the source file size implying a replication is requested." )
    return client.callProxyMethod( self.name, 'putFile', path, {'sourceSize':sourceSize} )
  gLogger.debug( "ProxyStorage.putFile: No source size was provided therefore a simple put will be performed." )
  res = self.__checkArgumentFormatDict( path )
  if not res['OK']:
    return res
  urls = res['Value']
  failed = {}
  successful = {}
  # FIX: the original constructed a second, identical RPCClient here;
  # the client built above is reused instead.
  transferClient = TransferClient( self.url )
  for dest_url, src_file in urls.items():
    fileName = os.path.basename( dest_url )
    # Step 1: stream the local file to the proxy server.
    res = transferClient.sendFile( src_file, 'putFile/%s' % fileName )
    if not res['OK']:
      gLogger.error( "ProxyStorage.putFile: Failed to send file to proxy server.", res['Message'] )
      failed[dest_url] = res['Message']
    else:
      # Step 2: ask the proxy to upload it to the real storage element.
      res = client.uploadFile( self.name, src_file )
      if not res['OK']:
        gLogger.error( "ProxyStorage.putFile: Failed to upload file to storage element from proxy server.", res['Message'] )
        failed[dest_url] = res['Message']
      else:
        # Step 3: confirm the transfer by querying the remote file size.
        res = self.__executeOperation( dest_url, 'getFileSize' )
        if not res['OK']:
          gLogger.error( "ProxyStorage.putFile: Failed to determine destination file size.", res['Message'] )
          failed[dest_url] = res['Message']
        else:
          successful[dest_url] = res['Value']
  resDict = {'Failed':failed, 'Successful':successful}
  return S_OK( resDict )
def getDirectory( self, path, localPath = False ):
  """ Fetch local copies of the remote directories given in path.

      :param path: directory URL(s) in any accepted argument format
      :param localPath: optional destination directory (defaults to the cwd)
      :return: S_OK( {'Failed' : ..., 'Successful' : ...} ) keyed by source dir
  """
  res = checkArgumentFormat( path )
  if not res['OK']:
    return res
  sourceDirs = res['Value']
  failed = {}
  successful = {}
  gLogger.debug( "DIPStorage.getDirectory: Attempting to get local copies of %s directories." % len( sourceDirs ) )
  transferClient = TransferClient( self.url )
  for remoteDir in sourceDirs:
    # Resolve the destination and make sure it exists before the bulk transfer.
    targetDir = localPath if localPath else os.getcwd()
    if not os.path.exists( targetDir ):
      os.mkdir( targetDir )
    res = transferClient.receiveBulk( targetDir, remoteDir )
    if res['OK']:
      gLogger.debug( "DIPStorage.getDirectory: Successfully got local copy of %s" % remoteDir )
      # File/size accounting is not available from the bulk transfer.
      successful[remoteDir] = {'Files':0, 'Size':0}
    else:
      gLogger.error( "DIPStorage.getDirectory: Failed to get entire directory.", remoteDir )
      failed[remoteDir] = res['Message']
  return S_OK( {'Failed':failed, 'Successful':successful} )
def web_getPlotImg(self):
  """ Get plot image.

      Expects a 'file' request argument naming the remote .png produced by
      the ReportGenerator; streams it back as a png attachment.
  """
  callback = {}
  if 'file' not in self.request.arguments:
    callback = {"success":"false", "error":"Maybe you forgot the file?"}
    self.finish( callback )
    return
  plotImageFile = str( self.request.arguments[ 'file' ][0] )
  # FIX: str.find() never returns a value below -1, so the original
  # "< -1" comparison could never reject a non-png name; "== -1" is the
  # intended "substring not found" test.
  if plotImageFile.find( ".png" ) == -1:
    callback = {"success":"false", "error":"Not a valid image!"}
    self.finish( callback )
    return
  transferClient = TransferClient( "Accounting/ReportGenerator" )
  tempFile = tempfile.TemporaryFile()
  retVal = transferClient.receiveFile( tempFile, plotImageFile )
  if not retVal[ 'OK' ]:
    callback = {"success":"false", "error":retVal[ 'Message' ]}
    self.finish( callback )
    return
  tempFile.seek( 0 )
  data = tempFile.read()
  self.set_header( 'Content-type', 'image/png' )
  self.set_header( 'Content-Disposition', 'attachment; filename="%s.png"' % md5( plotImageFile ).hexdigest() )
  self.set_header( 'Content-Length', len( data ) )
  self.set_header( 'Content-Transfer-Encoding', 'Binary' )
  # Cache-control headers deliberately disabled by the original author.
  #self.set_header( 'Cache-Control', "no-cache, no-store, must-revalidate, max-age=0" )
  #self.set_header( 'Pragma', "no-cache" )
  #self.set_header( 'Expires', ( datetime.datetime.utcnow() - datetime.timedelta( minutes = -10 ) ).strftime( "%d %b %Y %H:%M:%S GMT" ) )
  self.finish( data )
def __putFile( self, src_file, dest_url ):
  """ Upload a single file to this storage.

      A dips/dip source URL is first staged into the working directory and
      removed again once the upload has finished.

      :return: S_OK( sourceSize ) on success, S_ERROR otherwise
  """
  res = pfnparse( src_file )
  if not res['OK']:
    return res
  srcDict = res['Value']
  localCache = srcDict['Protocol'] in ['dips', 'dip']
  if localCache:
    # Stage the remote source locally before sending it on.
    srcSEURL = '%s://%s:%s%s' % ( srcDict['Protocol'], srcDict['Host'], srcDict['Port'], srcDict['WSUrl'] )
    stagingClient = TransferClient( srcSEURL )
    res = stagingClient.receiveFile( srcDict['FileName'], os.path.join( srcDict['Path'], srcDict['FileName'] ) )
    if not res['OK']:
      return res
    src_file = srcDict['FileName']
  if not os.path.exists( src_file ):
    errStr = "DIPStorage.__putFile: The source local file does not exist."
    gLogger.error( errStr, src_file )
    return S_ERROR( errStr )
  sourceSize = getSize( src_file )
  if sourceSize == -1:
    errStr = "DIPStorage.__putFile: Failed to get file size."
    gLogger.error( errStr, src_file )
    return S_ERROR( errStr )
  uploadClient = TransferClient( self.url )
  res = uploadClient.sendFile( src_file, dest_url, token = self.checkSum )
  if localCache:
    os.unlink( src_file )
  return S_OK( sourceSize ) if res['OK'] else res
def test_getPlot(self):
    """Generate a delayed WMSHistory plot and check it can be downloaded."""
    plotArgs = ('WMSHistory', 'NumberOfJobs',
                datetime(2016, 3, 16, 12, 30, 0, 0),
                datetime(2016, 3, 17, 19, 29, 0, 0),
                {'grouping': ['Site']},
                'Site',
                {})
    result = self.client.generateDelayedPlot(*plotArgs)
    self.assertTrue(result['OK'])
    # Fetch the generated plot through the transfer service.
    tempFile = tempfile.TemporaryFile()
    transferClient = TransferClient('Monitoring/Monitoring')
    result = transferClient.receiveFile(tempFile, result['Value']['plot'])
    self.assertTrue(result['OK'])
def getSandbox(self,jobID,output_dir=''): """ Get the job complete sandbox """ # Get the list of files in the sandbox sandbox_status = RPCClient('WorkloadManagement/%sSandbox' % self.sandbox_type,timeout=120) result = sandbox_status.getFileNames(jobID) if not result['OK']: return S_ERROR('Failed to get the list of file names') fileList = result['Value'] cwd = os.getcwd() if output_dir: os.chdir(os.path.realpath(output_dir)) error_files = [] decRes = 1 for f in fileList: sname = `jobID`+"::"+f sandbox = TransferClient('WorkloadManagement/%sSandbox' % self.sandbox_type) result = sandbox.receiveFile(f,sname) if not result['OK']: error_files.append(f) else: if f.find('__Sandbox__.tar') != -1 or f.find('__Sandbox__.tgz') != -1 : if f.find('.bz') != -1: decRes = os.system('tar xjf '+f) elif f.find('.gz') != -1 or f.find('.tgz') != -1: decRes = os.system('tar xzf '+f) else: decRes = os.system('tar xf '+f) os.remove(f) if decRes != 0: return S_ERROR( "Could not decompress sandbox" ) if output_dir: os.chdir(cwd) if error_files: result = S_ERROR('Failed to download all the files') result['FailedFiles'] = error_files else: result = S_OK(fileList) # Set job retrieved flag jobState = RPCClient('WorkloadManagement/JobStateUpdate',timeout=120) jobState.setJobFlag(jobID,'RetrievedFlag') return result
def getSEDump(self, seName, outputFilename):
    """Dump the content of an SE into the given file.

    The file contains a list of [lfn,checksum,size] dumped as csv,
    separated by '|'.

    :param seName: name of the StorageElement
    :param outputFilename: path to the file where to dump it
    :returns: result from the TransferClient
    """
    transferClient = TransferClient(self.serverURL)
    return transferClient.receiveFile(outputFilename, seName)
def __getFile( self, src_url, dest_file ):
  """ Download src_url from the storage into the local file dest_file.

      :return: S_OK( localSize ) on success, S_ERROR otherwise
  """
  transferClient = TransferClient( self.url )
  res = transferClient.receiveFile( dest_file, src_url, token = self.checkSum )
  if not res['OK']:
    return res
  # Sanity-check the transfer on the local side.
  if not os.path.exists( dest_file ):
    errStr = "DIPStorage.__getFile: The destination local file does not exist."
    gLogger.error( errStr, dest_file )
    return S_ERROR( errStr )
  localSize = getSize( dest_file )
  if localSize == -1:
    errStr = "DIPStorage.__getFile: Failed to get the local file size."
    gLogger.error( errStr, dest_file )
    return S_ERROR( errStr )
  return S_OK( localSize )
def test_getPlot(self):
    """Generate a delayed WMSHistory plot and verify it can be retrieved."""
    tempFile = tempfile.TemporaryFile()
    transferClient = TransferClient("Monitoring/Monitoring")
    params = (
        "WMSHistory",
        "NumberOfJobs",
        datetime(2016, 3, 16, 12, 30, 0, 0),
        datetime(2016, 3, 17, 19, 29, 0, 0),
        {"grouping": ["Site"]},
        "Site",
        {},
    )
    result = self.client.generateDelayedPlot(*params)
    # FIX: assertTrue replaces the long-deprecated assert_ alias.
    self.assertTrue(result["OK"])
    result = transferClient.receiveFile(tempFile, result["Value"]["plot"])
    self.assertTrue(result["OK"])
def putFile(self, path, sourceSize=0):
    """Put local file(s) to the storage element via the proxy server.

    :param path: path(s) in the dict format { dest_url : src_file }
    :param sourceSize: if non-zero, the client-provided source size, which
                       signals a replication request delegated entirely to
                       the proxy server
    :return: S_OK({'Failed': ..., 'Successful': ...}) keyed by dest_url;
             successful values are the remote file sizes
    """
    client = RPCClient(self.url)
    if sourceSize:
        gLogger.debug(
            "ProxyStorage.putFile: The client has provided the source file size implying a replication is requested.")
        return client.callProxyMethod(self.name, 'putFile', path, {'sourceSize': sourceSize})
    gLogger.debug(
        "ProxyStorage.putFile: No source size was provided therefore a simple put will be performed.")
    res = self.__checkArgumentFormatDict(path)
    if not res['OK']:
        return res
    urls = res['Value']
    failed = {}
    successful = {}
    # FIX: the original constructed a second, identical RPCClient here;
    # the client built above is reused instead.
    transferClient = TransferClient(self.url)
    for dest_url, src_file in urls.items():
        fileName = os.path.basename(dest_url)
        # Step 1: stream the local file to the proxy server.
        res = transferClient.sendFile(src_file, 'putFile/%s' % fileName)
        if not res['OK']:
            gLogger.error(
                "ProxyStorage.putFile: Failed to send file to proxy server.",
                res['Message'])
            failed[dest_url] = res['Message']
        else:
            # Step 2: ask the proxy to upload it to the real storage element.
            res = client.uploadFile(self.name, src_file)
            if not res['OK']:
                gLogger.error(
                    "ProxyStorage.putFile: Failed to upload file to storage element from proxy server.",
                    res['Message'])
                failed[dest_url] = res['Message']
            else:
                # Step 3: confirm the transfer via the remote file size.
                res = self.__executeOperation(dest_url, 'getFileSize')
                if not res['OK']:
                    gLogger.error(
                        "ProxyStorage.putFile: Failed to determine destination file size.",
                        res['Message'])
                    failed[dest_url] = res['Message']
                else:
                    successful[dest_url] = res['Value']
    resDict = {'Failed': failed, 'Successful': successful}
    return S_OK(resDict)
def getFile(self, path, localPath=False):
    """Download each file in path through the proxy server.

    :param path: source URL(s) in any accepted argument format
    :param localPath: optional destination directory (defaults to the cwd)
    :return: S_OK({'Failed': ..., 'Successful': ...}) keyed by source URL;
             successful values are the local file sizes
    """
    res = checkArgumentFormat(path)
    if not res['OK']:
        return res
    srcUrls = res['Value']
    failed = {}
    successful = {}
    client = RPCClient(self.url)
    # The transfer client must talk to the very same ProxyStorage instance:
    # only that one holds the file we want to transfer.
    transferClient = TransferClient(client.serviceURL)
    for src_url in srcUrls:
        res = client.prepareFile(self.name, src_url)
        if not res['OK']:
            gLogger.error(
                "ProxyStorage.getFile: Failed to prepare file on remote server.",
                res['Message'])
            failed[src_url] = res['Message']
            continue
        fileName = os.path.basename(src_url)
        baseDir = localPath if localPath else os.getcwd()
        dest_file = "%s/%s" % (baseDir, fileName)
        res = transferClient.receiveFile(dest_file, 'getFile/%s' % fileName)
        if not res['OK']:
            gLogger.error(
                "ProxyStorage.getFile: Failed to recieve file from proxy server.",
                res['Message'])
            failed[src_url] = res['Message']
        elif not os.path.exists(dest_file):
            errStr = "ProxyStorage.getFile: The destination local file does not exist."
            gLogger.error(errStr, dest_file)
            failed[src_url] = errStr
        else:
            destSize = getSize(dest_file)
            if destSize == -1:
                errStr = "ProxyStorage.getFile: Failed to get the local file size."
                gLogger.error(errStr, dest_file)
                failed[src_url] = errStr
            else:
                successful[src_url] = destSize
    return S_OK({'Failed': failed, 'Successful': successful})
def __getTransferClient(self):
    """Return the preset TransferClient if any, otherwise build a new one
    carrying this object's credentials and setup."""
    if self.__transferClient:
        return self.__transferClient
    return TransferClient(self.__serviceName,
                          useCertificates=self.__useCertificates,
                          delegatedGroup=self.__delegatedGroup,
                          delegatedDN=self.__delegatedDN,
                          setup=self.__setup)
def __getTransferClient(self):
    """Get transfer client.

    Reuses the existing client when available; otherwise builds a new
    one for the Framework/BundleDelivery service.

    :return: TransferClient()
    """
    client = self.transferClient
    if not client:
        client = TransferClient("Framework/BundleDelivery",
                                skipCACheck=skipCACheck())
    return client
def getFiles(self, in_dict):
    """Return a list of files for a given condition.

    The query is shipped to the BookkeepingManager as a JSON blob and the
    (potentially large) answer is streamed back through a temporary file.

    :param dict in_dict: the query conditions
    :return: decoded service answer, or the S_ERROR from the transfer
    """
    in_dict = dict(in_dict)
    bkk = TransferClient('Bookkeeping/BookkeepingManager')
    in_dict['MethodName'] = 'getFiles'
    params = JEncoder.dumps(in_dict)
    file_name = tempfile.NamedTemporaryFile()
    try:
        retVal = bkk.receiveFile(file_name.name, params)
        if not retVal['OK']:
            return retVal
        # FIX: the original leaked the read handle opened on the temp file.
        with open(file_name.name) as fd:
            return JEncoder.load(fd)
    finally:
        # FIX: always close (and thereby delete) the temp file, including on
        # the error branch where the original leaked it.
        file_name.close()
def sendFiles(self, jobID, fileList, sizeLimit=0): """ Send files in the fileList to a Sandbox service for the given jobID. This is the preferable method to upload sandboxes. fileList can contain both files and directories """ print "sendFiles: sizeLimit =", sizeLimit error_files = [] files_to_send = [] for file in fileList: if re.search('^lfn:', file) or re.search('^LFN:', file): pass else: if os.path.exists(file): files_to_send.append(file) else: error_files.append(file) if error_files: return S_ERROR('Failed to locate files: \n' + string.join(error_files, ',')) if sizeLimit > 0: # Evaluate the compressed size of the sandbox if getGlobbedTotalSize(files_to_send) > sizeLimit: tname = 'Sandbox_' + str(jobID) + '.tar.gz' import tarfile tarFile = tarfile.open(tname, 'w:gz') for file in files_to_send: tarFile.add(file) tarFile.close() result = S_ERROR('Size over the limit') result['SandboxFileName'] = tname return result sendName = str(jobID) + "::Job__Sandbox__" sandbox = TransferClient('WorkloadManagement/%sSandbox' % self.sandbox_type) result = sandbox.sendBulk(files_to_send, sendName) return result
def getFilesWithMetadata(in_dict):
    """Retrieve files with their metadata for a given condition.

    :param dict in_dict: It can contains the following conditions: 'ConfigName',
        'ConfigVersion', 'ConditionDescription', 'EventType', 'ProcessingPass',
        'Production', 'RunNumber', 'FileType', DataQuality, StartDate, EndDate
    :return: S_OK(files with meta data associated) or the transfer S_ERROR
    """
    in_dict = dict(in_dict)
    bkk = TransferClient('Bookkeeping/BookkeepingManager')
    params = JEncoder.dumps(in_dict)
    file_name = tempfile.NamedTemporaryFile()
    try:
        retVal = bkk.receiveFile(file_name.name, params)
        if not retVal['OK']:
            return retVal
        # FIX: the original leaked the read handle opened on the temp file.
        with open(file_name.name) as fd:
            value = JEncoder.load(fd)
        return S_OK(value)
    finally:
        # FIX: always close (and delete) the temp file, including on error.
        file_name.close()
def sendFile(self,jobID,fname): """ Send a file specified by fname to Sandbox service for job with jobID """ if os.path.exists(fname): if os.path.isdir(fname): dname = os.path.dirname(fname) bname = os.path.basename(fname) bzname = bname+'.tar.gz' if dname: comm = 'tar czf '+bzname+' -C '+dname+' '+bname else: comm = 'tar czf '+bzname+' '+bname result = shellCall(0,comm) if not result['OK'] or result['Value'][0]: return S_ERROR('Failed to send directory '+fname) sendName = `jobID`+"::"+bzname sandbox = TransferClient('WorkloadManagement/%sSandbox' % self.sandbox_type) result = sandbox.sendFile(sendName,bzname) if not result['OK']: gLogger.error('Failed to send directory '+bzname+' to Sandbox service for job '+`jobID`) os.remove(bzname) return result os.remove(bzname) else: # This is a file bname = os.path.basename(fname) sendName = `jobID`+"::"+bname sandbox = TransferClient('WorkloadManagement/%sSandbox' % self.sandbox_type) result = sandbox.sendFile(bname, sendName) if not result['OK']: gLogger.error('Failed to send file '+bname+' to Sandbox service for job '+`jobID`) return result # We are done OK return S_OK() else: gLogger.error("Can't find file "+ fname) return S_ERROR("Can't find file "+ fname)
def __putFile(self, src_file, dest_url):
    """Upload a single file to this storage.

    A dips/dip source URL is first staged into the working directory and
    deleted again once the upload has finished.

    :return: S_OK(sourceSize) on success, S_ERROR otherwise
    """
    res = pfnparse(src_file)
    if not res['OK']:
        return res
    srcDict = res['Value']
    localCache = False
    if srcDict['Protocol'] in ['dips', 'dip']:
        localCache = True
        # Derive the service URL from the file URL by dropping the file part.
        serviceDict = dict(srcDict)
        serviceDict['Path'] = '/'.join(srcDict['Path'].split('/')[:3])
        serviceDict['FileName'] = ''
        res = pfnunparse(serviceDict)
        if not res['OK']:
            return res
        stagingClient = TransferClient(res['Value'])
        res = stagingClient.receiveFile(
            srcDict['FileName'],
            os.path.join(srcDict['Path'], srcDict['FileName']))
        if not res['OK']:
            return res
        src_file = srcDict['FileName']
    if not os.path.exists(src_file):
        errStr = "DIPStorage.__putFile: The source local file does not exist."
        gLogger.error(errStr, src_file)
        return S_ERROR(errStr)
    sourceSize = getSize(src_file)
    if sourceSize == -1:
        errStr = "DIPStorage.__putFile: Failed to get file size."
        gLogger.error(errStr, src_file)
        return S_ERROR(errStr)
    uploadClient = TransferClient(self.url)
    res = uploadClient.sendFile(src_file, dest_url, token=self.checkSum)
    if localCache:
        os.unlink(src_file)
    if not res['OK']:
        return res
    return S_OK(sourceSize)
def putDirectory( self, path ):
  """ Put a local directory to the physical storage together with all its
      files and subdirectories.

      :param path: dict format { destDir : sourceDir }
      :return: S_OK( {'Failed' : ..., 'Successful' : ...} ) keyed by destDir
  """
  res = checkArgumentFormat( path )
  if not res['OK']:
    return res
  urls = res['Value']
  successful = {}
  failed = {}
  gLogger.debug( "DIPStorage.putDirectory: Attemping to put %s directories to remote storage." % len( urls ) )
  transferClient = TransferClient( self.url )
  for destDir, sourceDir in urls.items():
    # Ship every top-level entry of the source directory in one bulk call.
    sourceFiles = [ "%s/%s" % ( sourceDir, entry ) for entry in os.listdir( sourceDir ) ]
    res = transferClient.sendBulk( sourceFiles, destDir )
    if not res['OK']:
      failed[destDir] = res['Message']
    else:
      # File/size accounting is not available from the bulk transfer.
      successful[destDir] = {'Files':0, 'Size':0}
  return S_OK( {'Failed':failed, 'Successful':successful} )
def sendFiles(self,jobID,fileList,sizeLimit=0): """ Send files in the fileList to a Sandbox service for the given jobID. This is the preferable method to upload sandboxes. fileList can contain both files and directories """ print "sendFiles: sizeLimit =", sizeLimit error_files = [] files_to_send = [] for file in fileList: if re.search('^lfn:',file) or re.search('^LFN:',file): pass else: if os.path.exists(file): files_to_send.append(file) else: error_files.append(file) if error_files: return S_ERROR('Failed to locate files: \n'+string.join(error_files,',')) if sizeLimit > 0: # Evaluate the compressed size of the sandbox if getGlobbedTotalSize( files_to_send ) > sizeLimit: tname = 'Sandbox_'+str(jobID)+'.tar.gz' import tarfile tarFile = tarfile.open( tname, 'w:gz' ) for file in files_to_send: tarFile.add( file ) tarFile.close() result = S_ERROR('Size over the limit') result['SandboxFileName'] = tname return result sendName = str(jobID)+"::Job__Sandbox__" sandbox = TransferClient('WorkloadManagement/%sSandbox' % self.sandbox_type) result = sandbox.sendBulk(files_to_send,sendName) return result
def getFile( self, path, localPath = False ):
  """ Download each file in path through the proxy server.

      :param path: source URL(s) in any accepted argument format
      :param localPath: optional destination directory (defaults to the cwd)
      :return: S_OK( {'Failed' : ..., 'Successful' : ...} ) keyed by source
               URL; successful values are the local file sizes
  """
  res = checkArgumentFormat( path )
  if not res['OK']:
    return res
  srcUrls = res['Value']
  failed = {}
  successful = {}
  client = RPCClient( self.url )
  # The transfer client must talk to the very same ProxyStorage instance:
  # only that one holds the file we want to transfer.
  transferClient = TransferClient( client.serviceURL )
  for src_url in srcUrls:
    res = client.prepareFile( self.name, src_url )
    if not res['OK']:
      gLogger.error( "ProxyStorage.getFile: Failed to prepare file on remote server.", res['Message'] )
      failed[src_url] = res['Message']
      continue
    fileName = os.path.basename( src_url )
    baseDir = localPath if localPath else os.getcwd()
    dest_file = "%s/%s" % ( baseDir, fileName )
    res = transferClient.receiveFile( dest_file, 'getFile/%s' % fileName )
    if not res['OK']:
      gLogger.error( "ProxyStorage.getFile: Failed to recieve file from proxy server.", res['Message'] )
      failed[src_url] = res['Message']
    elif not os.path.exists( dest_file ):
      errStr = "ProxyStorage.getFile: The destination local file does not exist."
      gLogger.error( errStr, dest_file )
      failed[src_url] = errStr
    else:
      destSize = getSize( dest_file )
      if destSize == -1:
        errStr = "ProxyStorage.getFile: Failed to get the local file size."
        gLogger.error( errStr, dest_file )
        failed[src_url] = errStr
      else:
        successful[src_url] = destSize
  return S_OK( {'Failed':failed, 'Successful':successful} )
def __putFile(self, src_file, dest_url):
    """Upload a single file to this storage.

    A dips/dip source URL is first staged into the working directory and
    removed again once the upload has finished.

    :return: S_OK(sourceSize) on success, S_ERROR otherwise
    """
    res = pfnparse(src_file)
    if not res['OK']:
        return res
    srcDict = res['Value']
    localCache = srcDict['Protocol'] in ['dips', 'dip']
    if localCache:
        # Stage the remote source locally before sending it on.
        srcSEURL = '%s://%s:%s%s' % (srcDict['Protocol'], srcDict['Host'],
                                     srcDict['Port'], srcDict['WSUrl'])
        stagingClient = TransferClient(srcSEURL)
        res = stagingClient.receiveFile(
            srcDict['FileName'],
            os.path.join(srcDict['Path'], srcDict['FileName']))
        if not res['OK']:
            return res
        src_file = srcDict['FileName']
    if not os.path.exists(src_file):
        errStr = "DIPStorage.__putFile: The source local file does not exist."
        gLogger.error(errStr, src_file)
        return S_ERROR(errStr)
    sourceSize = getSize(src_file)
    if sourceSize == -1:
        errStr = "DIPStorage.__putFile: Failed to get file size."
        gLogger.error(errStr, src_file)
        return S_ERROR(errStr)
    uploadClient = TransferClient(self.url)
    res = uploadClient.sendFile(src_file, dest_url, token=self.checkSum)
    if localCache:
        os.unlink(src_file)
    return S_OK(sourceSize) if res['OK'] else res
def web_getPlotImgFromCache(self):
    """Get plot image from cache.

    Decodes the 'file' request argument, strips the extra arguments down to
    the plot title (so the cached plot can be found), re-encodes the request
    and streams the resulting png back as an attachment.
    """
    callback = {}
    if 'file' not in self.request.arguments:
        callback = {"success": "false", "error": "Maybe you forgot the file?"}
        self.finish(callback)
        return
    plotImageFile = str(self.request.arguments['file'][0])
    retVal = extractRequestFromFileId(plotImageFile)
    if not retVal['OK']:
        # FIX: a failed result carries its text under 'Message', not 'Value'
        # (reading 'Value' here raised KeyError on the error path).
        callback = {"success": "false", "error": retVal['Message']}
        self.finish(callback)
        return
    fields = retVal['Value']
    if "extraArgs" in fields:
        # in order to get the plot from the cache we have to clean the extraArgs...
        plotTitle = ""
        if 'plotTitle' in fields["extraArgs"]:
            plotTitle = fields["extraArgs"]["plotTitle"]
        fields["extraArgs"] = {}
        fields["extraArgs"]["plotTitle"] = plotTitle
    else:
        fields["extraArgs"] = {}
    retVal = codeRequestInFileId(fields)
    if not retVal['OK']:
        # FIX: same 'Message' vs 'Value' correction as above.
        callback = {"success": "false", "error": retVal['Message']}
        self.finish(callback)
        return
    plotImageFile = retVal['Value']['plot']
    transferClient = TransferClient("Monitoring/Monitoring")
    tempFile = tempfile.TemporaryFile()
    retVal = yield self.threadTask(transferClient.receiveFile, tempFile, plotImageFile)
    if not retVal['OK']:
        callback = {"success": "false", "error": retVal['Message']}
        self.finish(callback)
        return
    tempFile.seek(0)
    data = tempFile.read()
    self.set_header('Content-type', 'image/png')
    self.set_header('Content-Disposition',
                    'attachment; filename="%s.png"' % md5(plotImageFile).hexdigest())
    self.set_header('Content-Length', len(data))
    self.set_header('Content-Transfer-Encoding', 'Binary')
    self.set_header('Cache-Control', "no-cache, no-store, must-revalidate, max-age=0")
    self.set_header('Pragma', "no-cache")
    self.set_header(
        'Expires',
        (datetime.datetime.utcnow() - datetime.timedelta(minutes=-10)).strftime("%d %b %Y %H:%M:%S GMT"))
    self.finish(data)
def web_getPlotImg(self):
    """Get plot image.

    Expects a 'file' request argument naming the remote .png produced by
    the ReportGenerator; streams it back as a png attachment.
    """
    callback = {}
    if 'file' not in self.request.arguments:
        callback = {
            "success": "false",
            "error": "Maybe you forgot the file?"
        }
        self.finish(json.dumps(callback))
        return
    plotImageFile = str(self.request.arguments['file'][0])
    # FIX: str.find() never returns a value below -1, so the original
    # "< -1" comparison could never reject a non-png name; "== -1" is the
    # intended "substring not found" test.
    if plotImageFile.find(".png") == -1:
        callback = {"success": "false", "error": "Not a valid image!"}
        self.finish(json.dumps(callback))
        return
    transferClient = TransferClient("Accounting/ReportGenerator")
    tempFile = tempfile.TemporaryFile()
    retVal = yield self.threadTask(transferClient.receiveFile, tempFile, plotImageFile)
    if not retVal['OK']:
        callback = {"success": "false", "error": retVal['Message']}
        self.finish(json.dumps(callback))
        return
    tempFile.seek(0)
    data = tempFile.read()
    self.set_header('Content-type', 'image/png')
    self.set_header(
        'Content-Disposition',
        'attachment; filename="%s.png"' % md5(plotImageFile).hexdigest())
    self.set_header('Content-Length', len(data))
    self.set_header('Content-Transfer-Encoding', 'Binary')
    self.set_header('Cache-Control', "no-cache, no-store, must-revalidate, max-age=0")
    self.set_header('Pragma', "no-cache")
    self.set_header('Expires', (
        datetime.datetime.utcnow() - datetime.timedelta(minutes=-10)).strftime("%d %b %Y %H:%M:%S GMT"))
    self.finish(data)
def sendFile(self, jobID, fname): """ Send a file specified by fname to Sandbox service for job with jobID """ if os.path.exists(fname): if os.path.isdir(fname): dname = os.path.dirname(fname) bname = os.path.basename(fname) bzname = bname + '.tar.gz' if dname: comm = 'tar czf ' + bzname + ' -C ' + dname + ' ' + bname else: comm = 'tar czf ' + bzname + ' ' + bname result = shellCall(0, comm) if not result['OK'] or result['Value'][0]: return S_ERROR('Failed to send directory ' + fname) sendName = ` jobID ` + "::" + bzname sandbox = TransferClient('WorkloadManagement/%sSandbox' % self.sandbox_type) result = sandbox.sendFile(sendName, bzname) if not result['OK']: gLogger.error('Failed to send directory ' + bzname + ' to Sandbox service for job ' + ` jobID `) os.remove(bzname) return result os.remove(bzname) else: # This is a file bname = os.path.basename(fname) sendName = ` jobID ` + "::" + bname sandbox = TransferClient('WorkloadManagement/%sSandbox' % self.sandbox_type) result = sandbox.sendFile(bname, sendName) if not result['OK']: gLogger.error('Failed to send file ' + bname + ' to Sandbox service for job ' + ` jobID `) return result # We are done OK return S_OK() else: gLogger.error("Can't find file " + fname) return S_ERROR("Can't find file " + fname)
def __putFile(self, src_file, dest_url):
    """Upload a single file to this storage.

    When the source is itself a dips/dip URL it is first fetched into the
    working directory and removed once the upload completes.

    :return: S_OK(sourceSize) on success, S_ERROR otherwise
    """
    res = pfnparse(src_file)
    if not res['OK']:
        return res
    srcDict = res['Value']
    localCache = False
    if srcDict['Protocol'] in ['dips', 'dip']:
        localCache = True
        # Build the service URL from the file URL by dropping the file part.
        serviceDict = dict(srcDict)
        serviceDict['Path'] = '/'.join(srcDict['Path'].split('/')[:3])
        serviceDict['FileName'] = ''
        res = pfnunparse(serviceDict)
        if not res['OK']:
            return res
        srcSEURL = res['Value']
        remotePath = os.path.join(srcDict['Path'], srcDict['FileName'])
        res = TransferClient(srcSEURL).receiveFile(srcDict['FileName'], remotePath)
        if not res['OK']:
            return res
        src_file = srcDict['FileName']
    if not os.path.exists(src_file):
        errStr = "DIPStorage.__putFile: The source local file does not exist."
        gLogger.error(errStr, src_file)
        return S_ERROR(errStr)
    sourceSize = getSize(src_file)
    if sourceSize == -1:
        errStr = "DIPStorage.__putFile: Failed to get file size."
        gLogger.error(errStr, src_file)
        return S_ERROR(errStr)
    res = TransferClient(self.url).sendFile(src_file, dest_url, token=self.checkSum)
    if localCache:
        os.unlink(src_file)
    return S_OK(sourceSize) if res['OK'] else res
def __getTransferClient(self):
    """Return the existing transfer client, or build a ReportGenerator one."""
    if self.transferClient:
        return self.transferClient
    return TransferClient('Accounting/ReportGenerator')
def __getTransferClient(self):
    """ Get RPC client for TransferClient """
    client = self.__transferClient
    if not client:
        client = TransferClient(self.__serviceName, **self.__kwargs)
    return client
def __getTransferClient(self):
    """Return the existing transfer client or a new BundleDelivery one."""
    client = self.transferClient
    if client:
        return client
    return TransferClient("Framework/BundleDelivery", skipCACheck=CS.skipCACheck())
def __getTransferClient( self ):
  """ Return the preset transfer client, else build one for self.serviceName. """
  if not self.transferClient:
    return TransferClient( self.serviceName )
  return self.transferClient
def __getTransferClient( self ):
  """ Return the existing transfer client if set, otherwise a fresh one. """
  if self.transferClient:
    return self.transferClient
  return TransferClient( self.serviceName )
def __getTransferClient(self):
    """Return the preset transfer client or construct one with the stored kwargs."""
    client = self.__transferClient
    return client if client else TransferClient(self.__serviceName, **self.__kwargs)
def getTransferClient(*args, **kwargs):
    """Build a TransferClient after normalising the keyword arguments."""
    prepared = __prepareArgs(kwargs)
    return TransferClient(*args, **prepared)