def __init__(self, fromDict=None):
    """Build a fresh FTSJob record, then optionally hydrate it from *fromDict*.

    :param self: self reference
    :param dict fromDict: data dict
    """
    Record.__init__(self)
    # all three timestamps start out identical
    stamp = datetime.datetime.utcnow().replace(microsecond=0)
    for dateAttr in ("CreationTime", "SubmitTime", "LastUpdate"):
        self.__data__[dateAttr] = stamp
    self.__data__["Status"] = "Submitted"
    self.__data__["Completeness"] = 0
    self.__data__["FTSJobID"] = 0
    self.__files__ = TypedList(allowedTypes=FTSFile)
    self._log = gLogger.getSubLogger("FTSJob-%s" % self.FTSJobID, True)
    fromDict = fromDict or {}
    # serialised files are consumed first and removed from the dict
    for fileDict in fromDict.pop("FTSFiles", []):
        self += FTSFile(fileDict)
    for attrName, attrValue in fromDict.items():
        if attrName not in self.__data__:
            raise AttributeError("Unknown FTSJob attribute '%s'" % attrName)
        if attrValue:
            setattr(self, attrName, attrValue)
def __init__(self, fromDict=None):
    """c'tor — set record defaults, then absorb the optional serialisation dict.

    :param self: self reference
    :param dict fromDict: data dict
    """
    Record.__init__(self)
    utcNow = datetime.datetime.utcnow().replace(microsecond=0)
    for key, default in (("CreationTime", utcNow),
                         ("SubmitTime", utcNow),
                         ("LastUpdate", utcNow),
                         ("Status", "Submitted"),
                         ("Completeness", 0),
                         ("FTSJobID", 0)):
        self.__data__[key] = default
    self.__files__ = TypedList(allowedTypes=FTSFile)
    self._log = gLogger.getSubLogger("FTSJob-%s" % self.FTSJobID, True)
    if not fromDict:
        fromDict = {}
    for serialisedFile in fromDict.get("FTSFiles", []):
        self += FTSFile(serialisedFile)
    if "FTSFiles" in fromDict:
        del fromDict["FTSFiles"]
    for attr, val in fromDict.items():
        if attr not in self.__data__:
            raise AttributeError("Unknown FTSJob attribute '%s'" % attr)
        if val:
            setattr(self, attr, val)
def __init__(self, fromDict=None):
    """c'tor — initialise column defaults, helpers and registration counters,
    then optionally hydrate from *fromDict*.

    :param self: self reference
    :param dict fromDict: data dict
    """
    # one slot per DB column, unset columns stay None
    self.__data__ = dict.fromkeys(self.tableDesc()["Fields"].keys(), None)
    utcNow = datetime.datetime.utcnow().replace(microsecond=0)
    self.__data__["CreationTime"] = utcNow
    self.__data__["SubmitTime"] = utcNow
    self.__data__["LastUpdate"] = utcNow
    self.__data__["Status"] = "Submitted"
    self.__data__["Completeness"] = 0
    self.__data__["FTSJobID"] = 0
    # replica-registration bookkeeping
    self._regTime = 0.
    self._regSuccess = 0
    self._regTotal = 0
    self.__files__ = TypedList(allowedTypes=FTSFile)
    self._fc = FileCatalog()
    # flat tuple of every known state, duplicates removed
    self._states = tuple(set(self.INITSTATES + self.TRANSSTATES +
                             self.FAILEDSTATES + self.FINALSTATES))
    fromDict = fromDict or {}
    for serialisedFile in fromDict.pop("FTSFiles", []):
        self += FTSFile(serialisedFile)
    for attrName, attrValue in fromDict.items():
        if attrName not in self.__data__:
            raise AttributeError("Unknown FTSJob attribute '%s'" % attrName)
        if attrValue:
            setattr(self, attrName, attrValue)
    self._log = gLogger.getSubLogger("req_%s/FTSJob-%s" % (self.RequestID, self.FTSGUID), True)
def __init__(self, fromDict=None):
    """c'tor — default every column, pick up ownership from the active proxy,
    then optionally hydrate from *fromDict*.

    :param self: self reference
    :param dict fromDict: serialisation dict
    """
    Record.__init__(self)
    self.__waiting = None
    utcNow = datetime.datetime.utcnow().replace(microsecond=0)
    for dateKey in ("CreationTime", "SubmitTime", "LastUpdate"):
        self.__data__[dateKey] = utcNow
    self.__data__["Status"] = "Done"
    self.__data__["JobID"] = 0
    self.__data__["RequestID"] = 0
    # ownership defaults to the identity of the current proxy, when valid
    proxyInfo = getProxyInfo()
    if proxyInfo["OK"]:
        proxyInfo = proxyInfo["Value"]
        if proxyInfo["validGroup"] and proxyInfo["validDN"]:
            self.OwnerDN = proxyInfo["identity"]
            self.OwnerGroup = proxyInfo["group"]
    self.__dirty = []
    self.__operations__ = TypedList(allowedTypes=Operation)
    fromDict = fromDict or {}
    self.__dirty = fromDict.pop("__dirty", [])
    for opDict in fromDict.pop("Operations", []):
        self += Operation(opDict)
    for attrName, attrValue in fromDict.items():
        if attrName not in self.__data__:
            raise AttributeError("Unknown Request attribute '%s'" % attrName)
        if attrValue:
            setattr(self, attrName, attrValue)
    # refresh the request status from its operations
    self._notify()
def test01ctor(self):
    """ c'tor test """
    byTypes = [
        (TypedList(allowedTypes=self.numericTypes), self.numericTypes),
        (TypedList(allowedTypes=self.floatType), self.floatType),
        (TypedList(allowedTypes=self.testClassType), self.testClassType),
    ]
    for typedInstance, expectedTypes in byTypes:
        self.assertEqual(isinstance(typedInstance, TypedList), True)
        self.assertEqual(typedInstance.allowedTypes() == expectedTypes, True)
    # NOTE(review): the trailing dict is passed positionally, not as **kwargs —
    # confirm this still exercises the intended TypeError path
    self.assertRaises(TypeError, TypedList.__init__, (), {"allowedTypes": (1, 2, 3)})
def __init__(self, fromDict=None):
    """Create a queued Operation and optionally hydrate it from *fromDict*.

    :param self: self reference
    :param dict fromDict: attributes dictionary
    """
    Record.__init__(self)
    self._parent = None
    # # sub-request attributes, one slot per DB column
    self.__data__ = dict.fromkeys(self.tableDesc()["Fields"].keys(), None)
    utcNow = datetime.datetime.utcnow().replace(microsecond=0)
    self.__data__["SubmitTime"] = utcNow
    self.__data__["LastUpdate"] = utcNow
    self.__data__["CreationTime"] = utcNow
    self.__data__["OperationID"] = 0
    self.__data__["RequestID"] = 0
    self.__data__["Status"] = "Queued"
    # # operation files
    self.__files__ = TypedList(allowedTypes=File)
    # # dirty fileIDs
    self.__dirty = []
    # # init from dict
    fromDict = fromDict or {}
    self.__dirty = fromDict.pop("__dirty", [])
    for fileDict in fromDict.pop("Files", []):
        self.addFile(File(fileDict))
    for attrName, attrValue in fromDict.items():
        if attrName not in self.__data__:
            raise AttributeError("Unknown Operation attribute '%s'" % attrName)
        # "Order" is deliberately never set from the dict
        if attrName != "Order" and attrValue:
            setattr(self, attrName, attrValue)
def __init__(self, fromDict=None):
    """c'tor — initialise column defaults, helpers, FTS3 context holder and
    registration counters, then optionally hydrate from *fromDict*.

    :param self: self reference
    :param dict fromDict: data dict
    """
    self.__data__ = dict.fromkeys(self.tableDesc()["Fields"].keys(), None)
    stamp = datetime.datetime.utcnow().replace(microsecond=0)
    for dateAttr in ("CreationTime", "SubmitTime", "LastUpdate"):
        self.__data__[dateAttr] = stamp
    self.__data__["Status"] = "Submitted"
    self.__data__["Completeness"] = 0
    self.__data__["FTSJobID"] = 0
    # replica-registration bookkeeping
    self._regTime = 0.
    self._regSuccess = 0
    self._regTotal = 0
    self.__files__ = TypedList(allowedTypes=FTSFile)
    self._fc = FileCatalog()
    self._fts3context = None
    # flat tuple of every known state, duplicates removed
    self._states = tuple(set(self.INITSTATES + self.TRANSSTATES +
                             self.FAILEDSTATES + self.FINALSTATES))
    if not fromDict:
        fromDict = {}
    for serialisedFile in fromDict.get("FTSFiles", []):
        self += FTSFile(serialisedFile)
    if "FTSFiles" in fromDict:
        del fromDict["FTSFiles"]
    for attrName, attrValue in fromDict.items():
        if attrName not in self.__data__:
            raise AttributeError("Unknown FTSJob attribute '%s'" % attrName)
        if attrValue:
            setattr(self, attrName, attrValue)
    self._log = gLogger.getSubLogger("req_%s/FTSJob-%s" % (self.RequestID, self.FTSGUID), True)
def test02_add_iadd_radd(self):
    """ += +lvalue +rvalue """
    base = TypedList((1, 1.0, 1), self.numericTypes)
    # in-place +=
    base += [2, 2.0, 2]
    self.assertEqual(len(base), 6)
    self.assertEqual(base, [1, 1.0, 1, 2, 2.0, 2])
    # TypedList on the left of +
    leftSum = base + [3, 3.0, 3]
    self.assertEqual(len(leftSum), 9)
    self.assertEqual(leftSum, [1, 1.0, 1, 2, 2.0, 2, 3, 3.0, 3])
    # TypedList on the right of +
    rightSum = [0, 0.0, 0] + base
    self.assertEqual(len(rightSum), 9)
    self.assertEqual(rightSum, [0, 0.0, 0, 1, 1.0, 1, 2, 2.0, 2])
def __init__(self, fromDict=None):
    """c'tor — default all columns for a fresh Operation, then optionally
    hydrate from *fromDict*.

    :param self: self reference
    :param dict fromDict: attributes dictionary
    """
    Record.__init__(self)
    self._parent = None
    # # sub-request attributes
    self.__data__ = dict.fromkeys(self.tableDesc()["Fields"].keys(), None)
    stamp = datetime.datetime.utcnow().replace(microsecond=0)
    for dateKey in ("SubmitTime", "LastUpdate", "CreationTime"):
        self.__data__[dateKey] = stamp
    for intKey in ("OperationID", "RequestID"):
        self.__data__[intKey] = 0
    self.__data__["Status"] = "Queued"
    # # operation files
    self.__files__ = TypedList(allowedTypes=File)
    # # dirty fileIDs
    self.__dirty = []
    # # init from dict
    if not fromDict:
        fromDict = {}
    self.__dirty = fromDict.get("__dirty", [])
    if "__dirty" in fromDict:
        del fromDict["__dirty"]
    for fileDict in fromDict.get("Files", []):
        self.addFile(File(fileDict))
    if "Files" in fromDict:
        del fromDict["Files"]
    for key, value in fromDict.items():
        if key not in self.__data__:
            raise AttributeError("Unknown Operation attribute '%s'" % key)
        # "Order" is deliberately never set from the dict
        if key != "Order" and value:
            setattr(self, key, value)
def test01ctor(self):
    """ c'tor test """
    numericList = TypedList(allowedTypes=self.numericTypes)
    floatList = TypedList(allowedTypes=self.floatType)
    classList = TypedList(allowedTypes=self.testClassType)
    self.assertEqual(isinstance(numericList, TypedList), True)
    self.assertEqual(isinstance(floatList, TypedList), True)
    self.assertEqual(isinstance(classList, TypedList), True)
    self.assertEqual(numericList.allowedTypes() == self.numericTypes, True)
    self.assertEqual(floatList.allowedTypes() == self.floatType, True)
    self.assertEqual(classList.allowedTypes() == self.testClassType, True)
    # allowedTypes entries must be types, not instances
    self.assertRaises(TypeError, TypedList.__init__, (), {"allowedTypes": (1, 2, 3)})
class Request(Record):
    """
    .. class:: Request

    Container for one RMS request: bookkeeping columns plus an ordered
    TypedList of Operation instances whose statuses drive the request status.

    :param int RequestID: requestID
    :param str Name: request' name
    :param str OwnerDN: request's owner DN
    :param str OwnerGroup: request owner group
    :param str Setup: DIRAC setup
    :param str SourceComponent: whatever
    :param int JobID: jobID
    :param datetime.datetime CreationTime: UTC datetime
    :param datetime.datetime SubmissionTime: UTC datetime
    :param datetime.datetime LastUpdate: UTC datetime
    :param str Status: request's status
    :param TypedList operations: list of operations
    """

    #: every state the request may take
    ALL_STATES = ("Waiting", "Failed", "Done", "Scheduled", "Assigned", "Canceled")
    #: states the request never leaves
    FINAL_STATES = ("Done", "Failed", "Canceled")

    def __init__(self, fromDict=None):
        """c'tor

        :param self: self reference
        :param dict fromDict: optional serialisation dict
        """
        Record.__init__(self)
        self.__waiting = None
        utcNow = datetime.datetime.utcnow().replace(microsecond=0)
        self.__data__["CreationTime"] = utcNow
        self.__data__["SubmitTime"] = utcNow
        self.__data__["LastUpdate"] = utcNow
        self.__data__["Status"] = "Done"
        self.__data__["JobID"] = 0
        self.__data__["RequestID"] = 0
        # ownership defaults to the identity of the active proxy, when valid
        proxyInfo = getProxyInfo()
        if proxyInfo["OK"]:
            proxyInfo = proxyInfo["Value"]
            if proxyInfo["validGroup"] and proxyInfo["validDN"]:
                self.OwnerDN = proxyInfo["identity"]
                self.OwnerGroup = proxyInfo["group"]
        self.__dirty = []
        self.__operations__ = TypedList(allowedTypes=Operation)
        fromDict = fromDict if fromDict else {}
        self.__dirty = fromDict.get("__dirty", [])
        if "__dirty" in fromDict:
            del fromDict["__dirty"]
        for opDict in fromDict.get("Operations", []):
            self += Operation(opDict)
        if "Operations" in fromDict:
            del fromDict["Operations"]
        for key, value in fromDict.items():
            if key not in self.__data__:
                raise AttributeError("Unknown Request attribute '%s'" % key)
            if value:
                setattr(self, key, value)
        self._notify()

    @staticmethod
    def tableDesc():
        """ get table desc """
        return {
            "Fields": {
                "RequestID": "INTEGER NOT NULL AUTO_INCREMENT",
                "RequestName": "VARCHAR(255) NOT NULL",
                "OwnerDN": "VARCHAR(255)",
                "OwnerGroup": "VARCHAR(32)",
                "Status": "ENUM('Waiting', 'Assigned', 'Done', 'Failed', 'Canceled', 'Scheduled') DEFAULT 'Waiting'",
                "Error": "VARCHAR(255)",
                "DIRACSetup": "VARCHAR(32)",
                "SourceComponent": "BLOB",
                "JobID": "INTEGER DEFAULT 0",
                "CreationTime": "DATETIME",
                "SubmitTime": "DATETIME",
                "LastUpdate": "DATETIME"
            },
            "PrimaryKey": ["RequestID", "RequestName"],
            "Indexes": {"RequestName": ["RequestName"]}
        }

    def _notify(self):
        """ simple state machine for sub request statuses """
        self.__waiting = None
        # # update operations statuses
        rStatus = "Waiting"
        opStatusList = [(op.Status, op) for op in self]
        self.__waiting = None
        isScheduled = False
        isWaiting = False
        while opStatusList:
            opStatus, op = opStatusList.pop(0)
            # # Failed -> Failed
            if opStatus == "Failed":
                rStatus = "Failed"
                break
            # Scheduled -> Scheduled
            if opStatus == "Scheduled":
                if not isWaiting:
                    rStatus = "Scheduled"
                    self.__waiting = op
                    isScheduled = True
                continue
            if opStatus == "Queued":
                if isScheduled or isWaiting:
                    continue
                else:  # not isWaiting:
                    op._setWaiting(self)
                    self.__waiting = op
                    rStatus = "Waiting"
                    isWaiting = True
            if opStatus == "Waiting":
                if isScheduled or isWaiting:
                    op._setQueued(self)
                    rStatus = "Waiting"
                else:
                    self.__waiting = op
                    isWaiting = True
                    rStatus = "Waiting"
            if opStatus == "Done":
                if isScheduled or isWaiting:
                    continue
                else:
                    rStatus = "Done"
        self.Status = rStatus

    def getWaiting(self):
        """ get waiting operation if any """
        # # update states
        self._notify()
        return S_OK(self.__waiting)

    # # Operation arithmetics

    def __contains__(self, operation):
        """ in operator

        :param self: self reference
        :param Operation subRequest: a subRequest
        """
        return bool(operation in self.__operations__)

    def __iadd__(self, operation):
        """ += operator for subRequest

        :param self: self reference
        :param Operation operation: sub-request to add
        """
        if operation not in self:
            self.__operations__.append(operation)
            operation._parent = self
            self._notify()
        return self

    def insertBefore(self, newOperation, existingOperation):
        """ insert :newOperation: just before :existingOperation:

        :param self: self reference
        :param Operation newOperation: Operation to be inserted
        :param Operation existingOperation: previous Operation sibling
        """
        if existingOperation not in self:
            return S_ERROR("%s is not in" % existingOperation)
        if newOperation in self:
            return S_ERROR("%s is already in" % newOperation)
        self.__operations__.insert(self.__operations__.index(existingOperation), newOperation)
        newOperation._parent = self
        self._notify()
        return S_OK()

    def insertAfter(self, newOperation, existingOperation):
        """ insert :newOperation: just after :existingOperation:

        :param self: self reference
        :param Operation newOperation: Operation to be inserted
        :param Operation existingOperation: next Operation sibling
        """
        if existingOperation not in self:
            return S_ERROR("%s is not in" % existingOperation)
        if newOperation in self:
            return S_ERROR("%s is already in" % newOperation)
        self.__operations__.insert(self.__operations__.index(existingOperation) + 1, newOperation)
        newOperation._parent = self
        self._notify()
        return S_OK()

    def addOperation(self, operation):
        """ add :operation: to list of Operations

        :param self: self reference
        :param Operation operation: Operation to be inserted
        """
        if operation in self:
            return S_ERROR("This operation is already in!!!")
        self += operation
        return S_OK()

    def isEmpty(self):
        """ Evaluate if the request is empty """
        return len(self.__operations__) == 0

    def __iter__(self):
        """ iterator for sub-request """
        return self.__operations__.__iter__()

    def __getitem__(self, i):
        """ [] op for sub requests """
        return self.__operations__.__getitem__(i)

    def __setitem__(self, i, value):
        """ self[i] = val """
        # remember the replaced operation's ID so it can be purged from the DB
        if self[i].OperationID:
            self.__dirty.append(self[i].OperationID)
        self.__operations__.__setitem__(i, value)
        value._parent = self
        self._notify()

    def __delitem__(self, i):
        """ del self[i] """
        if not self.RequestID:
            self.__operations__.__delitem__(i)
        else:
            opId = self[i].OperationID
            if opId:
                self.__dirty.append(opId)
            self.__operations__.__delitem__(i)
        self._notify()

    def indexOf(self, subReq):
        """ return index of subReq (execution order) """
        return self.__operations__.index(subReq) if subReq in self else -1

    def __len__(self):
        """ nb of subRequests """
        return len(self.__operations__)

    def __str__(self):
        """ str operator """
        return str(self.toJSON()["Value"])

    def subStatusList(self):
        """ list of statuses for all operations """
        return [subReq.Status for subReq in self]

    # # properties

    @property
    def RequestID(self):
        """ request ID getter """
        return self.__data__["RequestID"]

    @RequestID.setter
    def RequestID(self, value):
        """ requestID setter (shouldn't be RO???) """
        self.__data__["RequestID"] = long(value) if value else 0

    @property
    def RequestName(self):
        """ request's name getter """
        return self.__data__["RequestName"]

    @RequestName.setter
    def RequestName(self, value):
        """ request name setter """
        if type(value) != str:
            raise TypeError("RequestName should be a string")
        # NOTE(review): truncated to 128 although the column allows 255 — confirm
        self.__data__["RequestName"] = value[:128]

    @property
    def OwnerDN(self):
        """ request owner DN getter """
        return self.__data__["OwnerDN"]

    @OwnerDN.setter
    def OwnerDN(self, value):
        """ request owner DN setter """
        if type(value) != str:
            raise TypeError("OwnerDN should be a string!")
        self.__data__["OwnerDN"] = value

    @property
    def OwnerGroup(self):
        """ request owner group getter """
        return self.__data__["OwnerGroup"]

    @OwnerGroup.setter
    def OwnerGroup(self, value):
        """ request owner group setter """
        if type(value) != str:
            raise TypeError("OwnerGroup should be a string!")
        self.__data__["OwnerGroup"] = value

    @property
    def DIRACSetup(self):
        """ DIRAC setup getter """
        return self.__data__["DIRACSetup"]

    @DIRACSetup.setter
    def DIRACSetup(self, value):
        """ DIRAC setup setter """
        if type(value) != str:
            raise TypeError("setup should be a string!")
        self.__data__["DIRACSetup"] = value

    @property
    def SourceComponent(self):
        """ source component getter """
        return self.__data__["SourceComponent"]

    @SourceComponent.setter
    def SourceComponent(self, value):
        """ source component setter """
        if type(value) != str:
            raise TypeError("Setup should be a string!")
        self.__data__["SourceComponent"] = value

    @property
    def JobID(self):
        """ jobID getter """
        return self.__data__["JobID"]

    @JobID.setter
    def JobID(self, value=0):
        """ jobID setter """
        self.__data__["JobID"] = long(value) if value else 0

    @property
    def CreationTime(self):
        """ creation time getter """
        return self.__data__["CreationTime"]

    @CreationTime.setter
    def CreationTime(self, value=None):
        """ creation time setter """
        if type(value) not in (datetime.datetime, str):
            raise TypeError("CreationTime should be a datetime.datetime!")
        if type(value) == str:
            # drop fractional seconds before parsing
            value = datetime.datetime.strptime(value.split(".")[0], '%Y-%m-%d %H:%M:%S')
        self.__data__["CreationTime"] = value

    @property
    def SubmitTime(self):
        """ request's submission time getter """
        return self.__data__["SubmitTime"]

    @SubmitTime.setter
    def SubmitTime(self, value=None):
        """ submission time setter """
        if type(value) not in (datetime.datetime, str):
            raise TypeError("SubmitTime should be a datetime.datetime!")
        if type(value) == str:
            value = datetime.datetime.strptime(value.split(".")[0], '%Y-%m-%d %H:%M:%S')
        self.__data__["SubmitTime"] = value

    @property
    def LastUpdate(self):
        """ last update getter """
        return self.__data__["LastUpdate"]

    @LastUpdate.setter
    def LastUpdate(self, value=None):
        """ last update setter """
        if type(value) not in (datetime.datetime, str):
            raise TypeError("LastUpdate should be a datetime.datetime!")
        if type(value) == str:
            value = datetime.datetime.strptime(value.split(".")[0], '%Y-%m-%d %H:%M:%S')
        self.__data__["LastUpdate"] = value

    @property
    def Status(self):
        """ status getter — recomputed from operations first """
        self._notify()
        return self.__data__["Status"]

    @Status.setter
    def Status(self, value):
        """ status setter """
        if value not in Request.ALL_STATES:
            raise ValueError("Unknown status: %s" % str(value))
        self.__data__["Status"] = value

    @property
    def Order(self):
        """ ro execution order getter """
        self._notify()
        opStatuses = [op.Status for op in self.__operations__]
        return opStatuses.index("Waiting") if "Waiting" in opStatuses else len(opStatuses)

    @property
    def Error(self):
        """ error getter """
        return self.__data__["Error"]

    @Error.setter
    def Error(self, value):
        """ error setter """
        if type(value) != str:
            raise TypeError("Error has to be a string!")
        self.__data__["Error"] = self._escapeStr(value, 255)

    def toSQL(self):
        """ prepare SQL INSERT or UPDATE statement """
        colVals = [("`%s`" % column,
                    "'%s'" % value if type(value) in (str, datetime.datetime) else str(value))
                   for column, value in self.__data__.items()
                   if value and column not in ("RequestID", "LastUpdate")]
        # LastUpdate is always refreshed server-side
        colVals.append(("`LastUpdate`", "UTC_TIMESTAMP()"))
        query = []
        if self.RequestID:
            query.append("UPDATE `Request` SET ")
            query.append(", ".join(["%s=%s" % item for item in colVals]))
            query.append(" WHERE `RequestID`=%d;\n" % self.RequestID)
        else:
            query.append("INSERT INTO `Request` ")
            columns = "(%s)" % ",".join([column for column, value in colVals])
            values = "(%s)" % ",".join([value for column, value in colVals])
            query.append(columns)
            query.append(" VALUES %s;" % values)
        return S_OK("".join(query))

    def cleanUpSQL(self):
        """ delete query for dirty operations """
        query = []
        if self.RequestID and self.__dirty:
            opIDs = ",".join([str(opID) for opID in self.__dirty])
            query.append("DELETE FROM `Operation` WHERE `RequestID`=%s AND `OperationID` IN (%s);\n" % (self.RequestID, opIDs))
            for opID in self.__dirty:
                query.append("DELETE FROM `File` WHERE `OperationID`=%s;\n" % opID)
        return query

    # # digest

    def toJSON(self):
        """ serialize to JSON format """
        digest = dict(zip(self.__data__.keys(),
                          [str(val) if val else "" for val in self.__data__.values()]))
        digest["RequestID"] = self.RequestID
        digest["Operations"] = []
        digest["__dirty"] = self.__dirty
        for op in self:
            opJSON = op.toJSON()
            if not opJSON["OK"]:
                return opJSON
            digest["Operations"].append(opJSON["Value"])
        return S_OK(digest)

    def getDigest(self):
        """ return digest for request """
        digest = ['Name:' + self.RequestName]
        for op in self:
            # NOTE(review): op.Type appears twice — confirm whether a different
            # attribute was intended for the second slot
            opDigest = [str(item) for item in (op.Type, op.Type, op.Status, op.Order)]
            if op.TargetSE:
                opDigest.append(op.TargetSE)
            if op.Catalog:
                opDigest.append(op.Catalog)
            if len(op):
                opFile = op[0]
                opDigest.append(opFile.LFN)
                opDigest.append(",...<%d files>" % len(op))
            digest.append(":".join(opDigest))
        return S_OK("\n".join(digest))
class Request(Record):
    """
    .. class:: Request

    Container for one RMS request: bookkeeping columns plus an ordered
    TypedList of Operation instances whose statuses drive the request status.

    :param int RequestID: requestID
    :param str Name: request' name
    :param str OwnerDN: request's owner DN
    :param str OwnerGroup: request owner group
    :param str Setup: DIRAC setup
    :param str SourceComponent: whatever
    :param int JobID: jobID
    :param datetime.datetime CreationTime: UTC datetime
    :param datetime.datetime SubmissionTime: UTC datetime
    :param datetime.datetime LastUpdate: UTC datetime
    :param str Status: request's status
    :param TypedList operations: list of operations
    """

    #: every state the request may take
    ALL_STATES = ("Waiting", "Failed", "Done", "Scheduled", "Assigned", "Canceled")
    #: states the request never leaves
    FINAL_STATES = ("Done", "Failed", "Canceled")

    def __init__(self, fromDict=None):
        """c'tor

        :param self: self reference
        :param dict fromDict: optional serialisation dict
        """
        Record.__init__(self)
        self.__waiting = None
        utcNow = datetime.datetime.utcnow().replace(microsecond=0)
        self.__data__["CreationTime"] = utcNow
        self.__data__["SubmitTime"] = utcNow
        self.__data__["LastUpdate"] = utcNow
        self.__data__["Status"] = "Done"
        self.__data__["JobID"] = 0
        self.__data__["RequestID"] = 0
        # ownership defaults to the identity of the active proxy, when valid
        proxyInfo = getProxyInfo()
        if proxyInfo["OK"]:
            proxyInfo = proxyInfo["Value"]
            if proxyInfo["validGroup"] and proxyInfo["validDN"]:
                self.OwnerDN = proxyInfo["identity"]
                self.OwnerGroup = proxyInfo["group"]
        self.__dirty = []
        self.__operations__ = TypedList(allowedTypes=Operation)
        fromDict = fromDict if fromDict else {}
        self.__dirty = fromDict.get("__dirty", [])
        if "__dirty" in fromDict:
            del fromDict["__dirty"]
        for opDict in fromDict.get("Operations", []):
            self += Operation(opDict)
        if "Operations" in fromDict:
            del fromDict["Operations"]
        for key, value in fromDict.items():
            if key not in self.__data__:
                raise AttributeError("Unknown Request attribute '%s'" % key)
            if value:
                setattr(self, key, value)
        self._notify()

    @staticmethod
    def tableDesc():
        """ get table desc """
        return {
            "Fields": {
                "RequestID": "INTEGER NOT NULL AUTO_INCREMENT",
                "RequestName": "VARCHAR(255) NOT NULL",
                "OwnerDN": "VARCHAR(255)",
                "OwnerGroup": "VARCHAR(32)",
                "Status": "ENUM('Waiting', 'Assigned', 'Done', 'Failed', 'Canceled', 'Scheduled') DEFAULT 'Waiting'",
                "Error": "VARCHAR(255)",
                "DIRACSetup": "VARCHAR(32)",
                "SourceComponent": "BLOB",
                "JobID": "INTEGER DEFAULT 0",
                "CreationTime": "DATETIME",
                "SubmitTime": "DATETIME",
                "LastUpdate": "DATETIME"
            },
            "PrimaryKey": ["RequestID"],
            'UniqueIndexes': {'RequestName': ['RequestName']}
        }

    def _notify(self):
        """ simple state machine for sub request statuses """
        self.__waiting = None
        # # update operations statuses
        rStatus = "Waiting"
        opStatusList = [(op.Status, op) for op in self]
        self.__waiting = None
        while opStatusList:
            # # Scan all status in order!
            opStatus, op = opStatusList.pop(0)
            # # Failed -> Failed
            if opStatus == "Failed":
                rStatus = "Failed"
                break
            # Scheduled -> Scheduled
            if opStatus == "Scheduled":
                if self.__waiting == None:
                    self.__waiting = op
                    rStatus = "Scheduled"
            # # First operation Queued becomes Waiting if no Waiting/Scheduled before
            elif opStatus == "Queued":
                if self.__waiting == None:
                    self.__waiting = op
                    op._setWaiting(self)
                    rStatus = "Waiting"
            # # First operation Waiting is next to execute, others are queued
            elif opStatus == "Waiting":
                rStatus = "Waiting"
                if self.__waiting == None:
                    self.__waiting = op
                else:
                    op._setQueued(self)
            # # All operations Done -> Done
            elif opStatus == "Done" and self.__waiting == None:
                rStatus = "Done"
                self.__data__['Error'] = ''
        self.Status = rStatus

    def getWaiting(self):
        """ get waiting operation if any """
        # # update states
        self._notify()
        return S_OK(self.__waiting)

    # # Operation arithmetics

    def __contains__(self, operation):
        """ in operator

        :param self: self reference
        :param Operation subRequest: a subRequest
        """
        return bool(operation in self.__operations__)

    def __iadd__(self, operation):
        """ += operator for subRequest

        :param self: self reference
        :param Operation operation: sub-request to add
        """
        if operation not in self:
            self.__operations__.append(operation)
            operation._parent = self
            self._notify()
        return self

    def insertBefore(self, newOperation, existingOperation):
        """ insert :newOperation: just before :existingOperation:

        :param self: self reference
        :param Operation newOperation: Operation to be inserted
        :param Operation existingOperation: previous Operation sibling
        """
        if existingOperation not in self:
            return S_ERROR("%s is not in" % existingOperation)
        if newOperation in self:
            return S_ERROR("%s is already in" % newOperation)
        self.__operations__.insert(self.__operations__.index(existingOperation), newOperation)
        newOperation._parent = self
        self._notify()
        return S_OK()

    def insertAfter(self, newOperation, existingOperation):
        """ insert :newOperation: just after :existingOperation:

        :param self: self reference
        :param Operation newOperation: Operation to be inserted
        :param Operation existingOperation: next Operation sibling
        """
        if existingOperation not in self:
            return S_ERROR("%s is not in" % existingOperation)
        if newOperation in self:
            return S_ERROR("%s is already in" % newOperation)
        self.__operations__.insert(self.__operations__.index(existingOperation) + 1, newOperation)
        newOperation._parent = self
        self._notify()
        return S_OK()

    def addOperation(self, operation):
        """ add :operation: to list of Operations

        :param self: self reference
        :param Operation operation: Operation to be inserted
        """
        if operation in self:
            return S_ERROR("This operation is already in!!!")
        self += operation
        return S_OK()

    def isEmpty(self):
        """ Evaluate if the request is empty """
        return len(self.__operations__) == 0

    def __iter__(self):
        """ iterator for sub-request """
        return self.__operations__.__iter__()

    def __getitem__(self, i):
        """ [] op for sub requests """
        return self.__operations__.__getitem__(i)

    def __setitem__(self, i, value):
        """ self[i] = val """
        self.__operations__._typeCheck(value)
        # remember the replaced operation's ID so it can be purged from the DB
        if self[i].OperationID:
            self.__dirty.append(self[i].OperationID)
        self.__operations__.__setitem__(i, value)
        value._parent = self
        self._notify()

    def __delitem__(self, i):
        """ del self[i] """
        if not self.RequestID:
            self.__operations__.__delitem__(i)
        else:
            opId = self[i].OperationID
            if opId:
                self.__dirty.append(opId)
            self.__operations__.__delitem__(i)
        self._notify()

    def indexOf(self, subReq):
        """ return index of subReq (execution order) """
        return self.__operations__.index(subReq) if subReq in self else -1

    def __nonzero__(self):
        """ for comparisons """
        return True

    def __len__(self):
        """ nb of subRequests """
        return len(self.__operations__)

    def __str__(self):
        """ str operator """
        return str(self.toJSON()["Value"])

    def subStatusList(self):
        """ list of statuses for all operations """
        return [subReq.Status for subReq in self]

    # # properties

    @property
    def RequestID(self):
        """ request ID getter """
        return self.__data__["RequestID"]

    @RequestID.setter
    def RequestID(self, value):
        """ requestID setter (shouldn't be RO???) """
        self.__data__["RequestID"] = long(value) if value else 0

    @property
    def RequestName(self):
        """ request's name getter """
        return self.__data__["RequestName"]

    @RequestName.setter
    def RequestName(self, value):
        """ request name setter """
        if type(value) != str:
            raise TypeError("RequestName should be a string")
        # NOTE(review): truncated to 128 although the column allows 255 — confirm
        self.__data__["RequestName"] = value[:128]

    @property
    def OwnerDN(self):
        """ request owner DN getter """
        return self.__data__["OwnerDN"]

    @OwnerDN.setter
    def OwnerDN(self, value):
        """ request owner DN setter """
        if type(value) != str:
            raise TypeError("OwnerDN should be a string!")
        self.__data__["OwnerDN"] = value

    @property
    def OwnerGroup(self):
        """ request owner group getter """
        return self.__data__["OwnerGroup"]

    @OwnerGroup.setter
    def OwnerGroup(self, value):
        """ request owner group setter """
        if type(value) != str:
            raise TypeError("OwnerGroup should be a string!")
        self.__data__["OwnerGroup"] = value

    @property
    def DIRACSetup(self):
        """ DIRAC setup getter """
        return self.__data__["DIRACSetup"]

    @DIRACSetup.setter
    def DIRACSetup(self, value):
        """ DIRAC setup setter """
        if type(value) != str:
            raise TypeError("setup should be a string!")
        self.__data__["DIRACSetup"] = value

    @property
    def SourceComponent(self):
        """ source component getter """
        return self.__data__["SourceComponent"]

    @SourceComponent.setter
    def SourceComponent(self, value):
        """ source component setter """
        if type(value) != str:
            raise TypeError("Setup should be a string!")
        self.__data__["SourceComponent"] = value

    @property
    def JobID(self):
        """ jobID getter """
        return self.__data__["JobID"]

    @JobID.setter
    def JobID(self, value=0):
        """ jobID setter """
        self.__data__["JobID"] = long(value) if value else 0

    @property
    def CreationTime(self):
        """ creation time getter """
        return self.__data__["CreationTime"]

    @CreationTime.setter
    def CreationTime(self, value=None):
        """ creation time setter """
        if type(value) not in (datetime.datetime, str):
            raise TypeError("CreationTime should be a datetime.datetime!")
        if type(value) == str:
            # drop fractional seconds before parsing
            value = datetime.datetime.strptime(value.split(".")[0], '%Y-%m-%d %H:%M:%S')
        self.__data__["CreationTime"] = value

    @property
    def SubmitTime(self):
        """ request's submission time getter """
        return self.__data__["SubmitTime"]

    @SubmitTime.setter
    def SubmitTime(self, value=None):
        """ submission time setter """
        if type(value) not in (datetime.datetime, str):
            raise TypeError("SubmitTime should be a datetime.datetime!")
        if type(value) == str:
            value = datetime.datetime.strptime(value.split(".")[0], '%Y-%m-%d %H:%M:%S')
        self.__data__["SubmitTime"] = value

    @property
    def LastUpdate(self):
        """ last update getter """
        return self.__data__["LastUpdate"]

    @LastUpdate.setter
    def LastUpdate(self, value=None):
        """ last update setter """
        if type(value) not in (datetime.datetime, str):
            raise TypeError("LastUpdate should be a datetime.datetime!")
        if type(value) == str:
            value = datetime.datetime.strptime(value.split(".")[0], '%Y-%m-%d %H:%M:%S')
        self.__data__["LastUpdate"] = value

    @property
    def Status(self):
        """ status getter — recomputed from operations first """
        self._notify()
        return self.__data__["Status"]

    @Status.setter
    def Status(self, value):
        """ status setter """
        if value not in Request.ALL_STATES:
            raise ValueError("Unknown status: %s" % str(value))
        # If the status moved to Failed or Done, update the lastUpdate time
        if value in ('Done', 'Failed'):
            if value != self.__data__["Status"]:
                self.LastUpdate = datetime.datetime.utcnow().replace(microsecond=0)
            if value == 'Done':
                self.__data__['Error'] = ''
        self.__data__["Status"] = value

    @property
    def Order(self):
        """ ro execution order getter """
        self._notify()
        opStatuses = [op.Status for op in self.__operations__]
        return opStatuses.index("Waiting") if "Waiting" in opStatuses else len(opStatuses)

    @property
    def Error(self):
        """ error getter """
        return self.__data__["Error"]

    @Error.setter
    def Error(self, value):
        """ error setter """
        if type(value) != str:
            raise TypeError("Error has to be a string!")
        self.__data__["Error"] = self._escapeStr(value, 255)

    def toSQL(self):
        """ prepare SQL INSERT or UPDATE statement """
        # Error is always written (possibly as NULL) so a cleared error sticks
        colVals = [("`%s`" % column,
                    "'%s'" % value if type(value) in (str, datetime.datetime)
                    else str(value) if value != None else "NULL")
                   for column, value in self.__data__.items()
                   if (column == 'Error' or value) and column not in ("RequestID", "LastUpdate")]
        # LastUpdate is always refreshed server-side
        colVals.append(("`LastUpdate`", "UTC_TIMESTAMP()"))
        query = []
        if self.RequestID:
            query.append("UPDATE `Request` SET ")
            query.append(", ".join(["%s=%s" % item for item in colVals]))
            query.append(" WHERE `RequestID`=%d;\n" % self.RequestID)
        else:
            query.append("INSERT INTO `Request` ")
            columns = "(%s)" % ",".join([column for column, value in colVals])
            values = "(%s)" % ",".join([value for column, value in colVals])
            query.append(columns)
            query.append(" VALUES %s;" % values)
        return S_OK("".join(query))

    def cleanUpSQL(self):
        """ delete query for dirty operations """
        query = []
        if self.RequestID and self.__dirty:
            opIDs = ",".join([str(opID) for opID in self.__dirty])
            query.append("DELETE FROM `Operation` WHERE `RequestID`=%s AND `OperationID` IN (%s);\n" % (self.RequestID, opIDs))
            for opID in self.__dirty:
                query.append("DELETE FROM `File` WHERE `OperationID`=%s;\n" % opID)
        return query

    # # digest

    def toJSON(self):
        """ serialize to JSON format """
        digest = dict([(key, str(getattr(self, key)) if getattr(self, key) else "")
                       for key in self.__data__])
        digest["RequestID"] = self.RequestID
        digest["__dirty"] = self.__dirty
        digest["Operations"] = [op.toJSON()['Value'] for op in self]
        return S_OK(digest)

    def getDigest(self):
        """ return digest for request """
        digest = ['Name:' + self.RequestName]
        for op in self:
            # NOTE(review): op.Type appears twice — confirm whether a different
            # attribute was intended for the second slot
            opDigest = [str(item) for item in (op.Type, op.Type, op.Status, op.Order)]
            if op.TargetSE:
                opDigest.append(op.TargetSE)
            if op.Catalog:
                opDigest.append(op.Catalog)
            if len(op):
                opFile = op[0]
                opDigest.append(opFile.LFN)
                opDigest.append(",...<%d files>" % len(op))
            digest.append(":".join(opDigest))
        return S_OK("\n".join(digest))

    def optimize(self):
        """ Merges together the operations that can be merged. They need to have the following arguments equal:
        * Type
        * Arguments
        * SourceSE
        * TargetSE
        * Catalog
        It also makes sure that the maximum number of Files in an Operation is never overcome.

        CAUTION: this method is meant to be called before inserting into the DB.
        So if the RequestId is not 0, we don't touch

        :return S_ERROR if the Request should not be optimized (because already in the DB
                S_OK(True) if a optimization was carried out
                S_OK(False) if no optimization were carried out
        """
        # Set to True if the request could be optimized
        optimized = False
        # List of attributes that must be equal for operations to be merged
        attrList = ["Type", "Arguments", "SourceSE", "TargetSE", "Catalog"]
        i = 0
        # If the RequestID is not the default one (0), it probably means
        # the Request is already in the DB, so we don't touch anything
        if self.RequestID != 0:
            return S_ERROR("Cannot optimize because Request seems to be already in the DB (RequestID %s)" % self.RequestID)
        # We could do it with a single loop (the 2nd one), but by doing this,
        # we can replace
        #   i += 1
        #   continue
        # with
        #   break
        # which is nicer in my opinion
        while i < len(self.__operations__):
            while (i + 1) < len(self.__operations__):
                # Some attributes need to be the same
                attrMismatch = False
                for attr in attrList:
                    if getattr(self.__operations__[i], attr) != getattr(self.__operations__[i + 1], attr):
                        attrMismatch = True
                        break
                if attrMismatch:
                    break
                # We do not do the merge if there are common files in the operations
                fileSetA = set(list(f.LFN for f in self.__operations__[i]))
                fileSetB = set(list(f.LFN for f in self.__operations__[i + 1]))
                if len(fileSetA & fileSetB):
                    break
                # There is a maximum number of files one can add into an operation
                try:
                    while len(self.__operations__[i + 1]):
                        self.__operations__[i] += self.__operations__[i + 1][0]
                        del self.__operations__[i + 1][0]
                        optimized = True
                    del self.__operations__[i + 1]
                except RuntimeError:
                    i += 1
            i += 1
        return S_OK(optimized)
class FTSJob( Record ):
  """
  .. class:: FTSJob

  Describes a single FTS transfer job: a set of :FTSFile: replicated from one
  SourceSE to one TargetSE through a given FTS server, together with helpers to
  submit/monitor the job via the gLite FTS2 CLI and to persist itself to SQL/JSON.
  """
  # # shared, lazily-created ReplicaManager instance (see :replicaManager:)
  __replicaManager = None
  # # initial states
  INITSTATES = ( "Submitted", "Ready", "Staging" )
  # # ongoing transfer states
  TRANSSTATES = ( "Active", "Hold" )
  # # failed states
  FAILEDSTATES = ( "Canceled", "Failed" )
  # # finished states (terminal — no further monitoring)
  FINALSTATES = ( "Finished", "FinishedDirty", "Failed", "Canceled" )

  # # regexp patterns recognising 'missing source file' FTS error messages
  missingSourceErrors = [
    re.compile( r"SOURCE error during TRANSFER_PREPARATION phase: \[INVALID_PATH\] Failed" ),
    re.compile( r"SOURCE error during TRANSFER_PREPARATION phase: \[INVALID_PATH\] No such file or directory" ),
    re.compile( r"SOURCE error during PREPARATION phase: \[INVALID_PATH\] Failed" ),
    re.compile( r"SOURCE error during PREPARATION phase: \[INVALID_PATH\] The requested file either does not exist" ),
    re.compile( r"TRANSFER error during TRANSFER phase: \[INVALID_PATH\] the server sent an error response: 500 500"\
                " Command failed. : open error: No such file or directory" ),
    re.compile( r"SOURCE error during TRANSFER_PREPARATION phase: \[USER_ERROR\] source file doesnt exist" ) ]

  def __init__( self, fromDict = None ):
    """c'tor

    :param self: self reference
    :param dict fromDict: data dict (may carry an "FTSFiles" list of FTSFile dicts)
    """
    Record.__init__( self )
    now = datetime.datetime.utcnow().replace( microsecond = 0 )
    self.__data__["CreationTime"] = now
    self.__data__["SubmitTime"] = now
    self.__data__["LastUpdate"] = now
    self.__data__["Status"] = "Submitted"
    self.__data__["Completeness"] = 0
    self.__data__["FTSJobID"] = 0
    self.__files__ = TypedList( allowedTypes = FTSFile )
    # NOTE: FTSJobID is still 0 here unless set later via fromDict, so the
    # sub-logger name is usually "FTSJob-0" for freshly created jobs
    self._log = gLogger.getSubLogger( "FTSJob-%s" % self.FTSJobID, True )
    fromDict = fromDict if fromDict else {}
    # # files first, so derived Files/Size counters are right before attrs are set
    for ftsFileDict in fromDict.get( "FTSFiles", [] ):
      self += FTSFile( ftsFileDict )
    if "FTSFiles" in fromDict:
      del fromDict["FTSFiles"]
    for key, value in fromDict.items():
      if key not in self.__data__:
        raise AttributeError( "Unknown FTSJob attribute '%s'" % key )
      if value:
        setattr( self, key, value )

  @classmethod
  def replicaManager( cls ):
    """ get shared replica manager (created on first use) """
    if not cls.__replicaManager:
      cls.__replicaManager = ReplicaManager()
    return cls.__replicaManager

  @staticmethod
  def tableDesc():
    """ get table description for the `FTSJob` DB table """
    return { "Fields" :
             { "FTSJobID" : "INTEGER NOT NULL AUTO_INCREMENT",
               "FTSGUID" : "VARCHAR(64) NOT NULL",
               "OperationID": "INTEGER NOT NULL",
               "RequestID": "INTEGER NOT NULL",
               "SourceSE" : "VARCHAR(128) NOT NULL",
               "TargetSE" : "VARCHAR(128) NOT NULL",
               "FTSServer" : "VARCHAR(255) NOT NULL",
               "TargetToken": "VARCHAR(255)",
               "SourceToken": "VARCHAR(255)",
               "Size": "INTEGER NOT NULL",
               "Files": "INTEGER NOT NULL",
               "Completeness": "INTEGER NOT NULL DEFAULT 0",
               "FailedFiles": "INTEGER DEFAULT 0",
               "FailedSize": "INTEGER DEFAULT 0",
               "Status" : "ENUM( 'Submitted', 'Ready', 'Staging', 'Canceled', 'Active', 'Hold', "\
                 "'Failed', 'Finished', 'FinishedDirty' ) DEFAULT 'Submitted'",
               "Error" : "VARCHAR(255)",
               "CreationTime" : "DATETIME",
               "SubmitTime" : "DATETIME",
               "LastUpdate" : "DATETIME" },
             "PrimaryKey" : [ "FTSJobID" ],
             "Indexes" : { "FTSJobID" : [ "FTSJobID" ], "FTSGUID": [ "FTSGUID" ] } }

  @property
  def FTSJobID( self ):
    """ FTSJobID getter """
    return self.__data__["FTSJobID"]

  @FTSJobID.setter
  def FTSJobID( self, value ):
    """ FTSJobID setter """
    self.__data__["FTSJobID"] = long( value ) if value else 0

  @property
  def RequestID( self ):
    """ RequestID getter """
    return self.__data__["RequestID"]

  @RequestID.setter
  def RequestID( self, value ):
    """ RequestID setter """
    value = long( value ) if value else 0
    self.__data__["RequestID"] = value

  @property
  def OperationID( self ):
    """ OperationID getter """
    return self.__data__["OperationID"]

  @OperationID.setter
  def OperationID( self, value ):
    """ OperationID setter """
    value = long( value ) if value else 0
    self.__data__["OperationID"] = value

  @property
  def FTSGUID( self ):
    """ FTSGUID prop """
    return self.__data__["FTSGUID"]

  @FTSGUID.setter
  def FTSGUID( self, value ):
    """ FTSGUID setter

    :raises TypeError: if value is neither str nor unicode
    :raises ValueError: if value is not a well-formed GUID
    """
    if value:
      if type( value ) not in ( str, unicode ):
        raise TypeError( "FTSGUID should be a string!" )
      if not checkGuid( value ):
        raise ValueError( "'%s' is not a valid GUID!" % str( value ) )
    self.__data__["FTSGUID"] = value

  @property
  def FTSServer( self ):
    """ FTSServer getter """
    return self.__data__["FTSServer"]

  @FTSServer.setter
  def FTSServer( self, url ):
    """ FTSServer setter """
    self.__data__["FTSServer"] = url

  @property
  def Completeness( self ):
    """ completeness getter """
    return self.__data__["Completeness"]

  @Completeness.setter
  def Completeness( self, value ):
    """ completeness setter """
    self.__data__["Completeness"] = int( value ) if value else 0

  @property
  def Error( self ):
    """ error getter """
    return self.__data__["Error"]

  @Error.setter
  def Error( self, error ):
    """ error setter — truncated to fit the VARCHAR(255) `Error` column

    FIX: original used str( error )[255:], which *dropped* the first 255
    characters (usually storing an empty string) instead of truncating.
    """
    self.__data__["Error"] = str( error )[:255]

  @property
  def Files( self ):
    """ nb files getter — always derived from the attached FTSFiles """
    self.__data__["Files"] = len( self )
    return self.__data__["Files"]

  @Files.setter
  def Files( self, value ):
    """ nb files setter — value is ignored on purpose, the count is derived """
    self.__data__["Files"] = len( self )

  @property
  def Status( self ):
    """ status prop """
    if not self.__data__["Status"]:
      # NOTE(review): "Waiting" is not part of the `Status` ENUM in tableDesc —
      # confirm this fallback is intended before persisting such a job
      self.__data__["Status"] = "Waiting"
    return self.__data__["Status"]

  @Status.setter
  def Status( self, value ):
    """ status setter

    :raises ValueError: for a status outside the recognised set
    """
    reStatus = re.compile( "Submitted|Ready|Staging|Hold|Canceled|Active|Failed|Finished|FinishedDirty|Assigned" )
    if not reStatus.match( value ):
      raise ValueError( "Unknown FTSJob Status: %s" % str( value ) )
    self.__data__["Status"] = value

  @property
  def FailedFiles( self ):
    """ nb failed files getter — derived from file states """
    self.__data__["FailedFiles"] = len( [ ftsFile for ftsFile in self
                                          if ftsFile.Status in FTSFile.FAILED_STATES ] )
    return self.__data__["FailedFiles"]

  @FailedFiles.setter
  def FailedFiles( self, value ):
    """ nb failed files setter

    FIX: original fallback summed the FTSFile objects themselves
    ( sum( [ ftsFile for ... ] ) ), which raises TypeError; it must count them.
    """
    if value:
      self.__data__["FailedFiles"] = value
    else:
      self.__data__["FailedFiles"] = len( [ ftsFile for ftsFile in self
                                            if ftsFile.Status in FTSFile.FAILED_STATES ] )

  @property
  def Size( self ):
    """ size getter — lazily summed over attached files """
    if not self.__data__["Size"]:
      self.__data__["Size"] = sum( [ ftsFile.Size for ftsFile in self ] )
    return self.__data__["Size"]

  @Size.setter
  def Size( self, value ):
    """ size setter """
    if value:
      self.__data__["Size"] = value
    else:
      self.__data__["Size"] = sum( [ ftsFile.Size for ftsFile in self ] )

  @property
  def FailedSize( self ):
    """ failed size getter — lazily summed over failed files """
    if not self.__data__["FailedSize"]:
      self.__data__["FailedSize"] = sum( [ ftsFile.Size for ftsFile in self
                                           if ftsFile.Status in FTSFile.FAILED_STATES ] )
    return self.__data__["FailedSize"]

  @FailedSize.setter
  def FailedSize( self, value ):
    """ failed size setter """
    if value:
      self.__data__["FailedSize"] = value
    else:
      self.__data__["FailedSize"] = sum( [ ftsFile.Size for ftsFile in self
                                           if ftsFile.Status in FTSFile.FAILED_STATES ] )

  @property
  def CreationTime( self ):
    """ creation time getter """
    return self.__data__["CreationTime"]

  @CreationTime.setter
  def CreationTime( self, value = None ):
    """ creation time setter — accepts datetime or 'Y-m-d H:M:S[.frac]' string """
    if type( value ) not in ( datetime.datetime, str ):
      raise TypeError( "CreationTime should be a datetime.datetime!" )
    if type( value ) == str:
      value = datetime.datetime.strptime( value.split( "." )[0], '%Y-%m-%d %H:%M:%S' )
    self.__data__["CreationTime"] = value

  @property
  def SubmitTime( self ):
    """ request's submission time getter """
    return self.__data__["SubmitTime"]

  @SubmitTime.setter
  def SubmitTime( self, value = None ):
    """ submission time setter — accepts datetime or 'Y-m-d H:M:S[.frac]' string """
    if type( value ) not in ( datetime.datetime, str ):
      raise TypeError( "SubmitTime should be a datetime.datetime!" )
    if type( value ) == str:
      value = datetime.datetime.strptime( value.split( "." )[0], '%Y-%m-%d %H:%M:%S' )
    self.__data__["SubmitTime"] = value

  @property
  def LastUpdate( self ):
    """ last update getter """
    return self.__data__["LastUpdate"]

  @LastUpdate.setter
  def LastUpdate( self, value = None ):
    """ last update setter — accepts datetime or 'Y-m-d H:M:S[.frac]' string """
    if type( value ) not in ( datetime.datetime, str ):
      raise TypeError( "LastUpdate should be a datetime.datetime!" )
    if type( value ) == str:
      value = datetime.datetime.strptime( value.split( "." )[0], '%Y-%m-%d %H:%M:%S' )
    self.__data__["LastUpdate"] = value

  @property
  def TargetSE( self ):
    """ target SE getter """
    return self.__data__["TargetSE"]

  @TargetSE.setter
  def TargetSE( self, targetSE ):
    """ target SE setter """
    self.__data__["TargetSE"] = targetSE

  @property
  def SourceSE( self ):
    """ source SE getter """
    return self.__data__["SourceSE"]

  @SourceSE.setter
  def SourceSE( self, sourceSE ):
    """ source SE setter """
    self.__data__["SourceSE"] = sourceSE

  @property
  def SourceToken( self ):
    """ source space token getter """
    return self.__data__["SourceToken"]

  @SourceToken.setter
  def SourceToken( self, sourceToken ):
    """ source space token setter """
    self.__data__["SourceToken"] = sourceToken

  @property
  def TargetToken( self ):
    """ target space token getter """
    return self.__data__["TargetToken"]

  @TargetToken.setter
  def TargetToken( self, targetToken ):
    """ target space token setter """
    self.__data__["TargetToken"] = targetToken

  # # FTSJobFiles arithmetics
  def __contains__( self, subFile ):
    """ in operator """
    return subFile in self.__files__

  def __iadd__( self, ftsFile ):
    """ += operator """
    if ftsFile not in self:
      self.__files__.append( ftsFile )
      ftsFile._parent = self
      # property reads refresh the derived Files/Size counters
      self.Files
      self.Size
    return self

  def __add__( self, ftsFile ):
    """ + operator

    FIX: original fell off the end and returned None, so `job = job + ftsFile`
    silently destroyed the reference; it must return the (mutated) job.
    """
    self += ftsFile
    return self

  def addFile( self, ftsFile ):
    """ add :ftsFile: to FTS job """
    self += ftsFile

  def subFile( self, ftsFile ):
    """ remove :ftsFile: from this job """
    if ftsFile in self:
      ftsFile._parent = None
      self.__files__.remove( ftsFile )

  # # helpers for looping
  def __iter__( self ):
    """ files iterator """
    return self.__files__.__iter__()

  def __getitem__( self, i ):
    """ [] op for files """
    return self.__files__.__getitem__( i )

  def __delitem__( self, i ):
    """ del ftsJob[i] """
    self.__files__.__delitem__( i )

  def __setitem__( self, i, ftsFile ):
    """ ftsJob[i] = ftsFile """
    self.__files__.__setitem__( i, ftsFile )

  def fileStatusList( self ):
    """ get list of files statuses """
    return [ ftsFile.Status for ftsFile in self ]

  def __len__( self ):
    """ nb of subFiles """
    return len( self.__files__ )

  def _surlPairs( self ):
    """ create and return SURL pairs file content (one 'src dst [checksum]' per line) """
    surls = []
    for ftsFile in self:
      checksum = "%s:%s" % ( ftsFile.ChecksumType, ftsFile.Checksum ) \
                 if ftsFile.ChecksumType and ftsFile.Checksum else ""
      surls.append( "%s %s %s" % ( ftsFile.SourceSURL, ftsFile.TargetSURL, checksum ) )
    return "\n".join( surls )

  def submitFTS2( self, stageFiles = False ):
    """ submit FTS job using the FTS2 glite-transfer-submit CLI

    :param bool stageFiles: prestage source files before transfer
    """
    if self.FTSGUID:
      return S_ERROR( "FTSJob already has been submitted" )
    surls = self._surlPairs()
    if not surls:
      return S_ERROR( "No files to submit" )
    fd, fileName = tempfile.mkstemp()
    surlFile = os.fdopen( fd, 'w' )
    surlFile.write( surls )
    surlFile.close()
    submitCommand = [ "glite-transfer-submit",
                      "-s", self.FTSServer,
                      "-f", fileName,
                      "-o",
                      "--compare-checksums" ]
    # NOTE(review): each option+value is appended as ONE argv element
    # ("-t token") — verify executeGridCommand splits these, otherwise the
    # option is passed to the CLI as a single malformed token
    if self.TargetToken:
      submitCommand.append( "-t %s" % self.TargetToken )
    if self.SourceToken:
      submitCommand.append( "-S %s" % self.SourceToken )
    if stageFiles:
      submitCommand.append( "--copy-pin-lifetime 86400" )
    submit = executeGridCommand( "", submitCommand )
    os.remove( fileName )
    if not submit["OK"]:
      return submit
    returnCode, output, errStr = submit["Value"]
    if not returnCode == 0:
      return S_ERROR( errStr )
    # CLI prints the job GUID on stdout
    self.FTSGUID = output.replace( "\n", "" )
    self.Status = "Submitted"
    for ftsFile in self:
      ftsFile.FTSGUID = self.FTSGUID
      ftsFile.Status = "Submitted"
    return S_OK()

  def monitorFTS2( self, full = False ):
    """ monitor FTS job using the FTS2 glite-transfer-status CLI

    :param bool full: request per-file details ('-l') and update FTSFile states
    """
    if not self.FTSGUID:
      return S_ERROR( "FTSGUID not set, FTS job not submitted?" )
    monitorCommand = [ "glite-transfer-status",
                       "--verbose",
                       "-s", self.FTSServer,
                       self.FTSGUID ]
    if full:
      monitorCommand.append( "-l" )
    monitor = executeGridCommand( "", monitorCommand )
    if not monitor["OK"]:
      return monitor
    returnCode, outputStr, errStr = monitor["Value"]
    # Returns a non zero status if error
    if returnCode != 0:
      return S_ERROR( errStr )
    # strip quoting/markup that would confuse the regexps below
    outputStr = outputStr.replace( "'", "" ).replace( "<", "" ).replace( ">", "" )
    # # set FTS job status
    regExp = re.compile( r"Status:\s+(\S+)" )
    self.Status = re.search( regExp, outputStr ).group( 1 )
    statusSummary = {}
    for state in FTSFile.ALL_STATES:
      regExp = re.compile( r"\s+%s:\s+(\d+)" % state )
      if regExp.search( outputStr ):
        statusSummary[state] = int( re.search( regExp, outputStr ).group( 1 ) )
    total = sum( statusSummary.values() )
    completed = sum( [ statusSummary.get( state, 0 ) for state in FTSFile.FINAL_STATES ] )
    # FIX: guard against ZeroDivisionError when no per-state counters were parsed
    self.Completeness = 100 * completed / total if total else 0
    if not full:
      return S_OK( statusSummary )
    # # parse per-file details from verbose listing
    regExp = re.compile( r"[ ]+Source:[ ]+(\S+)\n[ ]+Destination:[ ]+(\S+)\n[ ]+State:[ ]+(\S+)\n[ ]+Retries:[ ]+(\d+)\n[ ]+Reason:[ ]+([\S ]+).+?[ ]+Duration:[ ]+(\d+)", re.S )
    fileInfo = re.findall( regExp, outputStr )
    for sourceURL, targetURL, fileStatus, retries, reason, duration in fileInfo:
      candidateFile = None
      for ftsFile in self:
        if ftsFile.SourceSURL == sourceURL:
          candidateFile = ftsFile
          break
      if not candidateFile:
        continue
      candidateFile.Status = fileStatus
      candidateFile.Error = reason
      if candidateFile.Status == "Failed":
        for missingSource in self.missingSourceErrors:
          if missingSource.match( reason ):
            candidateFile.Error = "MissingSource"
    # # register successful files when the job reached a terminal state
    if self.Status in FTSJob.FINALSTATES:
      return self.finalize()
    return S_OK()

  def finalize( self ):
    """ register successfully transferred files in the catalog

    No-op unless the job is in a terminal state.
    """
    if self.Status not in FTSJob.FINALSTATES:
      return S_OK()
    targetSE = StorageElement( self.TargetSE )
    toRegister = [ ftsFile for ftsFile in self if ftsFile.Status == "Finished" ]
    toRegisterDict = {}
    for ftsFile in toRegister:
      pfn = targetSE.getPfnForProtocol( ftsFile.TargetSURL, "SRM2", withPort = False )
      if not pfn["OK"]:
        continue
      pfn = pfn["Value"]
      toRegisterDict[ ftsFile.LFN ] = { "PFN": pfn, "SE": self.TargetSE }
    if toRegisterDict:
      register = self.replicaManager().addCatalogReplica( toRegisterDict )
      if not register["OK"]:
        # FIX: replaced bare debug `print` statements with the instance logger
        for ftsFile in toRegister:
          ftsFile.Error = "AddCatalogReplicaFailed"
          self._log.error( ftsFile.Error )
        return register
      register = register["Value"]
      failedFiles = register["Failed"] if "Failed" in register else {}
      for ftsFile in toRegister:
        if ftsFile.LFN in failedFiles:
          ftsFile.Error = "AddCatalogReplicaFailed"
          self._log.error( ftsFile.Error )
    return S_OK()

  def toSQL( self ):
    """ prepare SQL INSERT or UPDATE statement

    :return: S_OK( str ) with SQL fragment
    """
    colVals = [ ( "`%s`" % column,
                  "'%s'" % value if type( value ) in ( str, datetime.datetime ) else str( value ) )
                for column, value in self.__data__.items()
                if value and column not in ( "FTSJobID", "LastUpdate" ) ]
    # LastUpdate is always refreshed server-side
    colVals.append( ( "`LastUpdate`", "UTC_TIMESTAMP()" ) )
    query = []
    if self.FTSJobID:
      query.append( "UPDATE `FTSJob` SET " )
      query.append( ",".join( [ "%s=%s" % item for item in colVals ] ) )
      query.append( " WHERE `FTSJobID`=%d;\n" % self.FTSJobID )
    else:
      query.append( "INSERT INTO `FTSJob` " )
      columns = "(%s)" % ",".join( [ column for column, value in colVals ] )
      values = "(%s)" % ",".join( [ value for column, value in colVals ] )
      query.append( columns )
      query.append( " VALUES %s;" % values )
    return S_OK( "".join( query ) )

  def toJSON( self ):
    """ dump to JSON-serialisable dict (falsy values become "") """
    digest = dict( zip( self.__data__.keys(),
                        [ str( val ) if val else "" for val in self.__data__.values() ] ) )
    digest["FTSFiles"] = []
    for ftsFile in self:
      fileJSON = ftsFile.toJSON()
      if not fileJSON["OK"]:
        return fileJSON
      digest["FTSFiles"].append( fileJSON["Value"] )
    return S_OK( digest )
class Request( Record ):
  """
  .. class:: Request

  Container of Operations with a small state machine (:_notify:) deriving the
  request status from its operations' statuses.

  :param int RequestID: requestID
  :param str Name: request' name
  :param str OwnerDN: request's owner DN
  :param str OwnerGroup: request owner group
  :param str Setup: DIRAC setup
  :param str SourceComponent: whatever
  :param int JobID: jobID
  :param datetime.datetime CreationTime: UTC datetime
  :param datetime.datetime SubmissionTime: UTC datetime
  :param datetime.datetime LastUpdate: UTC datetime
  :param str Status: request's status
  :param TypedList operations: list of operations
  """

  # # all possible request states
  ALL_STATES = ( "Waiting", "Failed", "Done", "Scheduled", "Assigned", "Canceled" )
  # # terminal states
  FINAL_STATES = ( "Done", "Failed", "Canceled" )

  def __init__( self, fromDict = None ):
    """c'tor

    :param self: self reference
    :param dict fromDict: optional data dict; may carry an "Operations" list of
      Operation dicts and a "__dirty" list of OperationIDs pending DB deletion
    """
    Record.__init__( self )
    self.__waiting = None
    now = datetime.datetime.utcnow().replace( microsecond = 0 )
    self.__data__["CreationTime"] = now
    self.__data__["SubmitTime"] = now
    self.__data__["LastUpdate"] = now
    # an empty request has no pending operations, hence "Done" by default
    self.__data__["Status"] = "Done"
    self.__data__["JobID"] = 0
    self.__data__["RequestID"] = 0
    # best-effort owner identification from the current proxy
    proxyInfo = getProxyInfo()
    if proxyInfo["OK"]:
      proxyInfo = proxyInfo["Value"]
      if proxyInfo["validGroup"] and proxyInfo["validDN"]:
        self.OwnerDN = proxyInfo["identity"]
        self.OwnerGroup = proxyInfo["group"]
    self.__dirty = []
    self.__operations__ = TypedList( allowedTypes = Operation )
    fromDict = fromDict if fromDict else {}
    self.__dirty = fromDict.get( "__dirty", [] )
    if "__dirty" in fromDict:
      del fromDict["__dirty"]
    for opDict in fromDict.get( "Operations", [] ):
      self += Operation( opDict )
    if "Operations" in fromDict:
      del fromDict["Operations"]
    for key, value in fromDict.items():
      if key not in self.__data__:
        raise AttributeError( "Unknown Request attribute '%s'" % key )
      if value:
        setattr( self, key, value )
    self._notify()

  @staticmethod
  def tableDesc():
    """ get table description for the `Request` DB table """
    return { "Fields" :
             { "RequestID" : "INTEGER NOT NULL AUTO_INCREMENT",
               "RequestName" : "VARCHAR(255) NOT NULL",
               "OwnerDN" : "VARCHAR(255)",
               "OwnerGroup" : "VARCHAR(32)",
               "Status" : "ENUM('Waiting', 'Assigned', 'Done', 'Failed', 'Canceled', 'Scheduled') DEFAULT 'Waiting'",
               "Error" : "VARCHAR(255)",
               "DIRACSetup" : "VARCHAR(32)",
               "SourceComponent" : "BLOB",
               "JobID" : "INTEGER DEFAULT 0",
               "CreationTime" : "DATETIME",
               "SubmitTime" : "DATETIME",
               "LastUpdate" : "DATETIME" },
             "PrimaryKey" : [ "RequestID" ],
             'UniqueIndexes': {'RequestName' : [ 'RequestName'] } }

  def _notify( self ):
    """ simple state machine for sub request statuses

    Scans operations in execution order, derives the request status and
    records the next waiting/scheduled operation in self.__waiting.
    """
    self.__waiting = None
    # # update operations statuses
    rStatus = "Waiting"
    opStatusList = [ ( op.Status, op ) for op in self ]
    self.__waiting = None
    while opStatusList:
      # # Scan all status in order!
      opStatus, op = opStatusList.pop( 0 )
      # # Failed -> Failed
      if opStatus == "Failed":
        rStatus = "Failed"
        break
      # Scheduled -> Scheduled
      if opStatus == "Scheduled":
        if self.__waiting == None:
          self.__waiting = op
          rStatus = "Scheduled"
      # # First operation Queued becomes Waiting if no Waiting/Scheduled before
      elif opStatus == "Queued":
        if self.__waiting == None:
          self.__waiting = op
          op._setWaiting( self )
          rStatus = "Waiting"
      # # First operation Waiting is next to execute, others are queued
      elif opStatus == "Waiting":
        rStatus = "Waiting"
        if self.__waiting == None:
          self.__waiting = op
        else:
          op._setQueued( self )
      # # All operations Done -> Done
      elif opStatus == "Done" and self.__waiting == None:
        rStatus = "Done"
        self.__data__['Error'] = ''
    self.Status = rStatus

  def getWaiting( self ):
    """ get waiting operation if any (None if nothing is pending) """
    # # update states
    self._notify()
    return S_OK( self.__waiting )

  # # Operation arithmetics
  def __contains__( self, operation ):
    """ in operator

    :param self: self reference
    :param Operation subRequest: a subRequest
    """
    return bool( operation in self.__operations__ )

  def __iadd__( self, operation ):
    """ += operator for subRequest

    :param self: self reference
    :param Operation operation: sub-request to add
    """
    if operation not in self:
      self.__operations__.append( operation )
      operation._parent = self
      self._notify()
    return self

  def insertBefore( self, newOperation, existingOperation ):
    """ insert :newOperation: just before :existingOperation:

    :param self: self reference
    :param Operation newOperation: Operation to be inserted
    :param Operation existingOperation: previous Operation sibling
    """
    if existingOperation not in self:
      return S_ERROR( "%s is not in" % existingOperation )
    if newOperation in self:
      return S_ERROR( "%s is already in" % newOperation )
    self.__operations__.insert( self.__operations__.index( existingOperation ), newOperation )
    newOperation._parent = self
    self._notify()
    return S_OK()

  def insertAfter( self, newOperation, existingOperation ):
    """ insert :newOperation: just after :existingOperation:

    :param self: self reference
    :param Operation newOperation: Operation to be inserted
    :param Operation existingOperation: next Operation sibling
    """
    if existingOperation not in self:
      return S_ERROR( "%s is not in" % existingOperation )
    if newOperation in self:
      return S_ERROR( "%s is already in" % newOperation )
    self.__operations__.insert( self.__operations__.index( existingOperation ) + 1, newOperation )
    newOperation._parent = self
    self._notify()
    return S_OK()

  def addOperation( self, operation ):
    """ add :operation: to list of Operations

    :param self: self reference
    :param Operation operation: Operation to be inserted
    """
    if operation in self:
      return S_ERROR( "This operation is already in!!!" )
    self += operation
    return S_OK()

  def isEmpty( self ):
    """ Evaluate if the request is empty """
    return len( self.__operations__ ) == 0

  def __iter__( self ):
    """ iterator for sub-request """
    return self.__operations__.__iter__()

  def __getitem__( self, i ):
    """ [] op for sub requests """
    return self.__operations__.__getitem__( i )

  def __setitem__( self, i, value ):
    """ self[i] = val — replaced persisted operation is remembered in __dirty """
    self.__operations__._typeCheck( value )
    if self[i].OperationID:
      self.__dirty.append( self[i].OperationID )
    self.__operations__.__setitem__( i, value )
    value._parent = self
    self._notify()

  def __delitem__( self, i ):
    """ del self[i] — persisted operation is remembered in __dirty for cleanup """
    if not self.RequestID:
      self.__operations__.__delitem__( i )
    else:
      opId = self[i].OperationID
      if opId:
        self.__dirty.append( opId )
      self.__operations__.__delitem__( i )
    self._notify()

  def indexOf( self, subReq ):
    """ return index of subReq (execution order), -1 if not present """
    return self.__operations__.index( subReq ) if subReq in self else -1

  def __nonzero__( self ):
    """ for comparisons — a Request is always truthy, even when empty """
    return True

  def __len__( self ):
    """ nb of subRequests """
    return len( self.__operations__ )

  def __str__( self ):
    """ str operator """
    return str( self.toJSON()["Value"] )

  def subStatusList( self ):
    """ list of statuses for all operations """
    return [ subReq.Status for subReq in self ]

  # # properties
  @property
  def RequestID( self ):
    """ request ID getter """
    return self.__data__["RequestID"]

  @RequestID.setter
  def RequestID( self, value ):
    """ requestID setter (shouldn't be RO???) """
    self.__data__["RequestID"] = long( value ) if value else 0

  @property
  def RequestName( self ):
    """ request's name getter """
    return self.__data__["RequestName"]

  @RequestName.setter
  def RequestName( self, value ):
    """ request name setter — silently truncated to 128 chars """
    if type( value ) != str:
      raise TypeError( "RequestName should be a string" )
    self.__data__["RequestName"] = value[:128]

  @property
  def OwnerDN( self ):
    """ request owner DN getter """
    return self.__data__["OwnerDN"]

  @OwnerDN.setter
  def OwnerDN( self, value ):
    """ request owner DN setter """
    if type( value ) != str:
      raise TypeError( "OwnerDN should be a string!" )
    self.__data__["OwnerDN"] = value

  @property
  def OwnerGroup( self ):
    """ request owner group getter """
    return self.__data__["OwnerGroup"]

  @OwnerGroup.setter
  def OwnerGroup( self, value ):
    """ request owner group setter """
    if type( value ) != str:
      raise TypeError( "OwnerGroup should be a string!" )
    self.__data__["OwnerGroup"] = value

  @property
  def DIRACSetup( self ):
    """ DIRAC setup getter """
    return self.__data__["DIRACSetup"]

  @DIRACSetup.setter
  def DIRACSetup( self, value ):
    """ DIRAC setup setter """
    if type( value ) != str:
      raise TypeError( "setup should be a string!" )
    self.__data__["DIRACSetup"] = value

  @property
  def SourceComponent( self ):
    """ source component getter """
    return self.__data__["SourceComponent"]

  @SourceComponent.setter
  def SourceComponent( self, value ):
    """ source component setter """
    if type( value ) != str:
      raise TypeError( "Setup should be a string!" )
    self.__data__["SourceComponent"] = value

  @property
  def JobID( self ):
    """ jobID getter """
    return self.__data__["JobID"]

  @JobID.setter
  def JobID( self, value = 0 ):
    """ jobID setter """
    self.__data__["JobID"] = long( value ) if value else 0

  @property
  def CreationTime( self ):
    """ creation time getter """
    return self.__data__["CreationTime"]

  @CreationTime.setter
  def CreationTime( self, value = None ):
    """ creation time setter — accepts datetime or 'Y-m-d H:M:S[.frac]' string """
    if type( value ) not in ( datetime.datetime, str ) :
      raise TypeError( "CreationTime should be a datetime.datetime!" )
    if type( value ) == str:
      value = datetime.datetime.strptime( value.split( "." )[0], '%Y-%m-%d %H:%M:%S' )
    self.__data__["CreationTime"] = value

  @property
  def SubmitTime( self ):
    """ request's submission time getter """
    return self.__data__["SubmitTime"]

  @SubmitTime.setter
  def SubmitTime( self, value = None ):
    """ submission time setter — accepts datetime or 'Y-m-d H:M:S[.frac]' string """
    if type( value ) not in ( datetime.datetime, str ):
      raise TypeError( "SubmitTime should be a datetime.datetime!" )
    if type( value ) == str:
      value = datetime.datetime.strptime( value.split( "." )[0], '%Y-%m-%d %H:%M:%S' )
    self.__data__["SubmitTime"] = value

  @property
  def LastUpdate( self ):
    """ last update getter """
    return self.__data__["LastUpdate"]

  @LastUpdate.setter
  def LastUpdate( self, value = None ):
    """ last update setter — accepts datetime or 'Y-m-d H:M:S[.frac]' string """
    if type( value ) not in ( datetime.datetime, str ):
      raise TypeError( "LastUpdate should be a datetime.datetime!" )
    if type( value ) == str:
      value = datetime.datetime.strptime( value.split( "." )[0], '%Y-%m-%d %H:%M:%S' )
    self.__data__["LastUpdate"] = value

  @property
  def Status( self ):
    """ status getter — re-derived from operations on every read """
    self._notify()
    return self.__data__["Status"]

  @Status.setter
  def Status( self, value ):
    """ status setter

    :raises ValueError: for a status outside ALL_STATES
    """
    if value not in Request.ALL_STATES:
      raise ValueError( "Unknown status: %s" % str( value ) )
    # If the status moved to Failed or Done, update the lastUpdate time
    if value in ( 'Done', 'Failed' ):
      if value != self.__data__["Status"]:
        self.LastUpdate = datetime.datetime.utcnow().replace( microsecond = 0 )
    if value == 'Done':
      self.__data__['Error'] = ''
    self.__data__["Status"] = value

  @property
  def Order( self ):
    """ ro execution order getter — index of first Waiting op, or nb of ops """
    self._notify()
    opStatuses = [ op.Status for op in self.__operations__ ]
    return opStatuses.index( "Waiting" ) if "Waiting" in opStatuses else len( opStatuses )

  @property
  def Error( self ):
    """ error getter """
    return self.__data__["Error"]

  @Error.setter
  def Error( self, value ):
    """ error setter — escaped and truncated to 255 chars """
    if type( value ) != str:
      raise TypeError( "Error has to be a string!" )
    self.__data__["Error"] = self._escapeStr( value, 255 )

  def toSQL( self ):
    """ prepare SQL INSERT or UPDATE statement """
    # Error is always included (even empty) so it can be cleared in the DB
    colVals = [ ( "`%s`" % column,
                  "'%s'" % value if type( value ) in ( str, datetime.datetime )
                  else str( value ) if value != None else "NULL" )
                for column, value in self.__data__.items()
                if ( column == 'Error' or value ) and column not in ( "RequestID", "LastUpdate" ) ]
    colVals.append( ( "`LastUpdate`", "UTC_TIMESTAMP()" ) )
    query = []
    if self.RequestID:
      query.append( "UPDATE `Request` SET " )
      query.append( ", ".join( [ "%s=%s" % item for item in colVals ] ) )
      query.append( " WHERE `RequestID`=%d;\n" % self.RequestID )
    else:
      query.append( "INSERT INTO `Request` " )
      columns = "(%s)" % ",".join( [ column for column, value in colVals ] )
      values = "(%s)" % ",".join( [ value for column, value in colVals ] )
      query.append( columns )
      query.append( " VALUES %s;" % values )
    return S_OK( "".join( query ) )

  def cleanUpSQL( self ):
    """ delete queries for dirty (replaced/removed, already persisted) operations """
    query = []
    if self.RequestID and self.__dirty:
      opIDs = ",".join( [ str( opID ) for opID in self.__dirty ] )
      query.append( "DELETE FROM `Operation` WHERE `RequestID`=%s AND `OperationID` IN (%s);\n" % ( self.RequestID, opIDs ) )
      for opID in self.__dirty:
        query.append( "DELETE FROM `File` WHERE `OperationID`=%s;\n" % opID )
    return query

  # # digest
  def toJSON( self ):
    """ serialize to JSON format (falsy values become "") """
    digest = dict( [( key, str( getattr( self, key ) ) if getattr( self, key ) else "" )
                    for key in self.__data__] )
    digest["RequestID"] = self.RequestID
    digest["__dirty"] = self.__dirty
    digest["Operations"] = [op.toJSON()['Value'] for op in self]
    return S_OK( digest )

  def getDigest( self ):
    """ return one-line-per-operation digest for request """
    digest = ['Name:' + self.RequestName]
    for op in self:
      # NOTE(review): op.Type appears twice in the digest tuple — looks like a
      # copy/paste slip, but kept as-is since consumers may parse this format
      opDigest = [ str( item ) for item in ( op.Type, op.Type, op.Status, op.Order ) ]
      if op.TargetSE:
        opDigest.append( op.TargetSE )
      if op.Catalog:
        opDigest.append( op.Catalog )
      if len( op ):
        opFile = op[0]
        opDigest.append( opFile.LFN )
        opDigest.append( ",...<%d files>" % len( op ) )
      digest.append( ":".join( opDigest ) )
    return S_OK( "\n".join( digest ) )

  def optimize( self ):
    """ Merges together the operations that can be merged. They need to have the following arguments equal:
        * Type
        * Arguments
        * SourceSE
        * TargetSE
        * Catalog
        It also makes sure that the maximum number of Files in an Operation is never overcome.

        CAUTION: this method is meant to be called before inserting into the DB.
                So if the RequestId is not 0, we don't touch

        :return S_ERROR if the Request should not be optimized (because already in the DB
                S_OK(True) if a optimization was carried out
                S_OK(False) if no optimization were carried out
    """
    # Set to True if the request could be optimized
    optimized = False
    # List of attributes that must be equal for operations to be merged
    attrList = ["Type", "Arguments", "SourceSE", "TargetSE", "Catalog" ]
    i = 0
    # If the RequestID is not the default one (0), it probably means
    # the Request is already in the DB, so we don't touch anything
    if self.RequestID != 0:
      return S_ERROR( "Cannot optimize because Request seems to be already in the DB (RequestID %s)" % self.RequestID )
    # We could do it with a single loop (the 2nd one), but by doing this,
    # we can replace
    # i += 1
    # continue
    #
    # with
    # break
    #
    # which is nicer in my opinion
    while i < len( self.__operations__ ):
      while ( i + 1 ) < len( self.__operations__ ):
        # Some attributes need to be the same
        attrMismatch = False
        for attr in attrList:
          if getattr( self.__operations__[i], attr ) != getattr( self.__operations__[i + 1], attr ):
            attrMismatch = True
            break
        if attrMismatch:
          break
        # We do not do the merge if there are common files in the operations
        fileSetA = set( list( f.LFN for f in self.__operations__[i] ) )
        fileSetB = set( list( f.LFN for f in self.__operations__[i + 1] ) )
        if len( fileSetA & fileSetB ):
          break
        # There is a maximum number of files one can add into an operation
        # (Operation.__iadd__ raises RuntimeError when MAX_FILES is reached)
        try:
          while len( self.__operations__[i + 1] ):
            self.__operations__[i] += self.__operations__[i + 1][0]
            del self.__operations__[i + 1][0]
            optimized = True
          del self.__operations__[i + 1]
        except RuntimeError:
          i += 1
      i += 1
    return S_OK( optimized )
class Operation(Record):
    """ .. class:: Operation

    DB-backed record describing one sub-request (a batch of Files to process
    with a single operation type). Keeps its scalar attributes in
    ``self.__data__`` and its File children in ``self.__files__``; status is
    recomputed from the children via :meth:`_notify`.

    :param long OperationID: OperationID as read from DB backend
    :param long RequestID: parent RequestID
    :param str Status: execution status
    :param str Type: operation to perform
    :param str Arguments: additional arguments
    :param str SourceSE: source SE name
    :param str TargetSE: target SE names as comma separated list
    :param str Catalog: catalog to use as comma separated list
    :param str Error: error string if any
    :param Request parent: parent Request instance
    """
    # # max files in a single operation
    MAX_FILES = 10000

    # # all states
    ALL_STATES = ("Queued", "Waiting", "Scheduled", "Assigned",
                  "Failed", "Done", "Canceled")

    # # final states
    FINAL_STATES = ("Failed", "Done", "Canceled")

    def __init__(self, fromDict=None):
        """ c'tor

        :param self: self reference
        :param dict fromDict: attributes dictionary
        """
        Record.__init__(self)
        # parent Request; set externally when this Operation is attached
        self._parent = None
        # # sub-request attributes
        # column names come from tableDesc() so __data__ always mirrors the DB schema
        self.__data__ = dict.fromkeys( self.tableDesc()["Fields"].keys(), None )
        now = datetime.datetime.utcnow().replace(microsecond=0)
        self.__data__["SubmitTime"] = now
        self.__data__["LastUpdate"] = now
        self.__data__["CreationTime"] = now
        self.__data__["OperationID"] = 0
        self.__data__["RequestID"] = 0
        self.__data__["Status"] = "Queued"
        # # operation files
        self.__files__ = TypedList(allowedTypes=File)
        # # dirty fileIDs (FileIDs removed/overwritten while OperationID is set,
        # # to be purged from the DB by cleanUpSQL)
        self.__dirty = []
        # # init from dict
        fromDict = fromDict if fromDict else {}
        self.__dirty = fromDict.get("__dirty", [])
        if "__dirty" in fromDict:
            del fromDict["__dirty"]
        for fileDict in fromDict.get("Files", []):
            self.addFile(File(fileDict))
        if "Files" in fromDict:
            del fromDict["Files"]
        for key, value in fromDict.items():
            if key not in self.__data__:
                raise AttributeError("Unknown Operation attribute '%s'" % key)
            # "Order" is derived from the parent's index, never set directly
            if key != "Order" and value:
                setattr(self, key, value)

    @staticmethod
    def tableDesc():
        """ get table desc """
        return {
            "Fields" : {
                "OperationID" : "INTEGER NOT NULL AUTO_INCREMENT",
                "RequestID" : "INTEGER NOT NULL",
                "Type" : "VARCHAR(64) NOT NULL",
                "Status" : "ENUM('Waiting', 'Assigned', 'Queued', 'Done', 'Failed', 'Canceled', 'Scheduled') "\
                           "DEFAULT 'Queued'",
                "Arguments" : "MEDIUMBLOB",
                "Order" : "INTEGER NOT NULL",
                "SourceSE" : "VARCHAR(255)",
                "TargetSE" : "VARCHAR(255)",
                "Catalog" : "VARCHAR(255)",
                "Error": "VARCHAR(255)",
                "CreationTime" : "DATETIME",
                "SubmitTime" : "DATETIME",
                "LastUpdate" : "DATETIME" },
            'ForeignKeys': {'RequestID': 'Request.RequestID' },
            "PrimaryKey" : "OperationID" }

    # # protected methods for parent only
    def _notify(self):
        """ notify self about file status change

        Recomputes this Operation's Status from its files' statuses and
        propagates the change to the parent Request.
        """
        fStatus = set(self.fileStatusList())
        if fStatus == set(['Failed']):
            # All files Failed -> Failed
            newStatus = 'Failed'
        elif 'Scheduled' in fStatus:
            newStatus = 'Scheduled'
        elif "Waiting" in fStatus:
            newStatus = 'Queued'
        elif 'Failed' in fStatus:
            newStatus = 'Failed'
        else:
            # no Failed/Scheduled/Waiting left -> everything Done; clear Error
            self.__data__['Error'] = ''
            newStatus = 'Done'
        # If the status moved to Failed or Done, update the lastUpdate time
        if newStatus in ('Failed', 'Done', 'Scheduled'):
            if self.__data__["Status"] != newStatus:
                self.LastUpdate = datetime.datetime.utcnow().replace(
                    microsecond=0)
        self.__data__["Status"] = newStatus
        if self._parent:
            self._parent._notify()

    def _setQueued(self, caller):
        """ don't touch """
        # only the parent Request is allowed to force this transition
        if caller == self._parent:
            self.__data__["Status"] = "Queued"

    def _setWaiting(self, caller):
        """ don't touch as well """
        if caller == self._parent:
            self.__data__["Status"] = "Waiting"

    # # Files arithmetics
    def __contains__(self, opFile):
        """ in operator """
        return opFile in self.__files__

    def __iadd__(self, opFile):
        """ += operator """
        if len(self) >= Operation.MAX_FILES:
            raise RuntimeError("too many Files in a single Operation")
        self.addFile(opFile)
        return self

    def addFile(self, opFile):
        """ add :opFile: to operation

        :raises RuntimeError: when MAX_FILES is already reached
        """
        if len(self) >= Operation.MAX_FILES:
            raise RuntimeError("too many Files in a single Operation")
        if opFile not in self:
            self.__files__.append(opFile)
            opFile._parent = self
        self._notify()

    # # helpers for looping
    def __iter__(self):
        """ files iterator """
        return self.__files__.__iter__()

    def __getitem__(self, i):
        """ [] op for opFiles """
        return self.__files__.__getitem__(i)

    def __delitem__(self, i):
        """ remove file from op, only if OperationID is NOT set """
        if not self.OperationID:
            self.__files__.__delitem__(i)
        else:
            # already persisted: remember the FileID so cleanUpSQL can delete it
            if self[i].FileID:
                self.__dirty.append(self[i].FileID)
            self.__files__.__delitem__(i)
        self._notify()

    def __setitem__(self, i, opFile):
        """ overwrite opFile """
        self.__files__._typeCheck(opFile)
        toDelete = self[i]
        if toDelete.FileID:
            self.__dirty.append(toDelete.FileID)
        self.__files__.__setitem__(i, opFile)
        opFile._parent = self
        self._notify()

    def fileStatusList(self):
        """ get list of files statuses """
        return [subFile.Status for subFile in self]

    def __nonzero__(self):
        """ for comparisons """
        # an Operation is always truthy, even when it holds no files
        return True

    def __len__(self):
        """ nb of subFiles """
        return len(self.__files__)

    # # properties
    @property
    def RequestID(self):
        """ RequestID getter (RO) """
        return self._parent.RequestID if self._parent else -1

    @RequestID.setter
    def RequestID(self, value):
        """ can't set RequestID by hand """
        # value is ignored on purpose: RequestID always follows the parent
        self.__data__[
            "RequestID"] = self._parent.RequestID if self._parent else -1

    @property
    def OperationID(self):
        """ OperationID getter """
        return self.__data__["OperationID"]

    @OperationID.setter
    def OperationID(self, value):
        """ OperationID setter """
        self.__data__["OperationID"] = long(value) if value else 0

    @property
    def Type(self):
        """ operation type prop """
        return self.__data__["Type"]

    @Type.setter
    def Type(self, value):
        """ operation type setter """
        self.__data__["Type"] = str(value)

    @property
    def Arguments(self):
        """ arguments getter """
        return self.__data__["Arguments"]

    @Arguments.setter
    def Arguments(self, value):
        """ arguments setter """
        self.__data__["Arguments"] = value if value else ""

    @property
    def SourceSE(self):
        """ source SE prop """
        return self.__data__["SourceSE"] if self.__data__["SourceSE"] else ""

    @SourceSE.setter
    def SourceSE(self, value):
        """ source SE setter """
        value = ",".join(self._uniqueList(value))
        if len(value) > 256:
            raise ValueError("SourceSE list too long")
        self.__data__["SourceSE"] = str(value)[:255] if value else ""

    @property
    def sourceSEList(self):
        """ helper property returning source SEs as a list"""
        return self.SourceSE.split(",")

    @property
    def TargetSE(self):
        """ target SE prop """
        return self.__data__["TargetSE"] if self.__data__["TargetSE"] else ""

    @TargetSE.setter
    def TargetSE(self, value):
        """ target SE setter """
        value = ",".join(self._uniqueList(value))
        if len(value) > 256:
            raise ValueError("TargetSE list too long")
        self.__data__["TargetSE"] = value[:255] if value else ""

    @property
    def targetSEList(self):
        """ helper property returning target SEs as a list"""
        return self.TargetSE.split(",")

    @property
    def Catalog(self):
        """ catalog prop """
        return self.__data__["Catalog"]

    @Catalog.setter
    def Catalog(self, value):
        """ catalog setter """
        # FIXME
        ######### THIS IS A TEMPORARY HOT FIX MEANT TO SMOOTH THE LFC->DFC MIGRATION
        if value == "LcgFileCatalogCombined":
            value = "FileCatalog,LcgFileCatalogCombined"
        ###########################################################################
        value = ",".join(self._uniqueList(value))
        if len(value) > 255:
            raise ValueError("Catalog list too long")
        self.__data__["Catalog"] = value if value else ""

    @property
    def catalogList(self):
        """ helper property returning catalogs as list """
        # NOTE(review): raises AttributeError if Catalog was never set (None) —
        # confirm callers always set Catalog before reading this
        return self.__data__["Catalog"].split(",")

    @property
    def Error(self):
        """ error prop """
        return self.__data__["Error"]

    @Error.setter
    def Error(self, value):
        """ error setter """
        # NOTE(review): strict str check — a unicode Error would be rejected here
        if type(value) != str:
            raise TypeError("Error has to be a string!")
        self.__data__["Error"] = self._escapeStr(value[:240], 255)

    @property
    def Status(self):
        """ Status prop """
        return self.__data__["Status"]

    @Status.setter
    def Status(self, value):
        """ Status setter

        :raises ValueError: for a value outside ALL_STATES
        """
        if value not in Operation.ALL_STATES:
            raise ValueError("unknown Status '%s'" % str(value))
        if self.__files__:
            # with files attached the status is derived, not set directly
            self._notify()
        else:
            # If the status moved to Failed or Done, update the lastUpdate time
            if value in ('Failed', 'Done'):
                if self.__data__["Status"] != value:
                    self.LastUpdate = datetime.datetime.utcnow().replace(
                        microsecond=0)
            self.__data__["Status"] = value
            if self._parent:
                self._parent._notify()
        if self.__data__['Status'] == 'Done':
            self.__data__['Error'] = ''

    @property
    def Order(self):
        """ order prop """
        # derived from the position inside the parent Request, cached in __data__
        if self._parent:
            self.__data__["Order"] = self._parent.indexOf(
                self) if self._parent else -1
        return self.__data__["Order"]

    @property
    def CreationTime(self):
        """ operation creation time prop """
        return self.__data__["CreationTime"]

    @CreationTime.setter
    def CreationTime(self, value=None):
        """ creation time setter """
        if type(value) not in (datetime.datetime, str):
            raise TypeError("CreationTime should be a datetime.datetime!")
        if type(value) == str:
            # accepted string format is 'YYYY-MM-DD HH:MM:SS' (fraction dropped)
            value = datetime.datetime.strptime(
                value.split(".")[0], '%Y-%m-%d %H:%M:%S')
        self.__data__["CreationTime"] = value

    @property
    def SubmitTime(self):
        """ subrequest's submit time prop """
        return self.__data__["SubmitTime"]

    @SubmitTime.setter
    def SubmitTime(self, value=None):
        """ submit time setter """
        if type(value) not in (datetime.datetime, str):
            raise TypeError("SubmitTime should be a datetime.datetime!")
        if type(value) == str:
            value = datetime.datetime.strptime(
                value.split(".")[0], '%Y-%m-%d %H:%M:%S')
        self.__data__["SubmitTime"] = value

    @property
    def LastUpdate(self):
        """ last update prop """
        return self.__data__["LastUpdate"]

    @LastUpdate.setter
    def LastUpdate(self, value=None):
        """ last update setter """
        if type(value) not in (datetime.datetime, str):
            raise TypeError("LastUpdate should be a datetime.datetime!")
        if type(value) == str:
            value = datetime.datetime.strptime(
                value.split(".")[0], '%Y-%m-%d %H:%M:%S')
        self.__data__["LastUpdate"] = value
        # keep the parent Request's LastUpdate in sync
        if self._parent:
            self._parent.LastUpdate = value

    def __str__(self):
        """ str operator """
        return str(self.toJSON()["Value"])

    def toSQL(self):
        """ get SQL INSERT or UPDATE statement

        :raises AttributeError: when RequestID is not set
        :return: S_OK with the statement string
        """
        if not getattr(self, "RequestID"):
            raise AttributeError("RequestID not set")
        # quote str/datetime values; a None value becomes SQL NULL;
        # Error is always included so it can be reset to NULL
        colVals = [
            ("`%s`" % column,
             "'%s'" % getattr(self, column)
             if type(getattr(self, column)) in (str, datetime.datetime)
             else str(getattr(self, column)) if getattr(self, column) != None else "NULL")
            for column in self.__data__
            if (column == 'Error' or getattr(self, column)) and column not in (
                "OperationID", "LastUpdate", "Order")
        ]
        colVals.append(("`LastUpdate`", "UTC_TIMESTAMP()"))
        colVals.append(("`Order`", str(self.Order)))
        # colVals.append( ( "`Status`", "'%s'" % str(self.Status) ) )
        query = []
        if self.OperationID:
            query.append("UPDATE `Operation` SET ")
            query.append(", ".join(["%s=%s" % item for item in colVals]))
            query.append(" WHERE `OperationID`=%d;\n" % self.OperationID)
        else:
            query.append("INSERT INTO `Operation` ")
            columns = "(%s)" % ",".join([column for column, value in colVals])
            values = "(%s)" % ",".join([value for column, value in colVals])
            query.append(columns)
            query.append(" VALUES %s;\n" % values)
        return S_OK("".join(query))

    def cleanUpSQL(self):
        """ query deleting dirty records from File table

        :return: DELETE statement string, or None when there is nothing to clean
        """
        if self.OperationID and self.__dirty:
            fIDs = ",".join([str(fid) for fid in self.__dirty])
            return "DELETE FROM `File` WHERE `OperationID` = %s AND `FileID` IN (%s);\n" % (
                self.OperationID, fIDs)

    def toJSON(self):
        """ get json digest """
        digest = dict([(key, str(getattr(self, key)) if getattr(self, key) else "")
                       for key in self.__data__])
        digest["RequestID"] = str(self.RequestID)
        digest["Order"] = str(self.Order)
        if self.__dirty:
            digest["__dirty"] = self.__dirty
        digest["Files"] = [opFile.toJSON()['Value'] for opFile in self]
        return S_OK(digest)
class Operation( Record ):
  """ .. class:: Operation

  DB-backed record describing one sub-request; older variant of the class
  (note MAX_FILES = 100, no LFC->DFC hot fix and no NULL handling in toSQL).

  :param long OperationID: OperationID as read from DB backend
  :param long RequestID: parent RequestID
  :param str Status: execution status
  :param str Type: operation to perform
  :param str Arguments: additional arguments
  :param str SourceSE: source SE name
  :param str TargetSE: target SE names as comma separated list
  :param str Catalog: catalog to use as comma separated list
  :param str Error: error string if any
  :param Request parent: parent Request instance
  """
  # # max files in a single operation
  MAX_FILES = 100

  # # all states
  ALL_STATES = ( "Queued", "Waiting", "Scheduled", "Assigned",
                 "Failed", "Done", "Canceled" )

  # # final states
  FINAL_STATES = ( "Failed", "Done", "Canceled" )

  def __init__( self, fromDict = None ):
    """ c'tor

    :param self: self reference
    :param dict fromDict: attributes dictionary
    """
    Record.__init__( self )
    # parent Request; set externally when this Operation is attached
    self._parent = None
    # # sub-request attributes
    # column names come from tableDesc() so __data__ always mirrors the DB schema
    self.__data__ = dict.fromkeys( self.tableDesc()["Fields"].keys(), None )
    now = datetime.datetime.utcnow().replace( microsecond = 0 )
    self.__data__["SubmitTime"] = now
    self.__data__["LastUpdate"] = now
    self.__data__["CreationTime"] = now
    self.__data__["OperationID"] = 0
    self.__data__["RequestID"] = 0
    self.__data__["Status"] = "Queued"
    # # operation files
    self.__files__ = TypedList( allowedTypes = File )
    # # dirty fileIDs (FileIDs removed/overwritten while OperationID is set)
    self.__dirty = []
    # # init from dict
    fromDict = fromDict if fromDict else {}
    self.__dirty = fromDict.get( "__dirty", [] )
    if "__dirty" in fromDict:
      del fromDict["__dirty"]
    for fileDict in fromDict.get( "Files", [] ):
      self.addFile( File( fileDict ) )
    if "Files" in fromDict:
      del fromDict["Files"]
    for key, value in fromDict.items():
      if key not in self.__data__:
        raise AttributeError( "Unknown Operation attribute '%s'" % key )
      # "Order" is derived from the parent's index, never set directly
      if key != "Order" and value:
        setattr( self, key, value )

  @staticmethod
  def tableDesc():
    """ get table desc """
    return { "Fields" : { "OperationID" : "INTEGER NOT NULL AUTO_INCREMENT",
                          "RequestID" : "INTEGER NOT NULL",
                          "Type" : "VARCHAR(64) NOT NULL",
                          "Status" : "ENUM('Waiting', 'Assigned', 'Queued', 'Done', 'Failed', 'Canceled', 'Scheduled') "\
                                     "DEFAULT 'Queued'",
                          "Arguments" : "MEDIUMBLOB",
                          "Order" : "INTEGER NOT NULL",
                          "SourceSE" : "VARCHAR(255)",
                          "TargetSE" : "VARCHAR(255)",
                          "Catalog" : "VARCHAR(255)",
                          "Error": "VARCHAR(255)",
                          "CreationTime" : "DATETIME",
                          "SubmitTime" : "DATETIME",
                          "LastUpdate" : "DATETIME" },
             "PrimaryKey" : "OperationID" }

  # # protected methods for parent only
  def _notify( self ):
    """ notify self about file status change

    Recomputes Status from the files' statuses and propagates to the parent.
    """
    fStatus = list( set( self.fileStatusList() ) )
    # default when every file is Done (or there are no files)
    newStatus = "Done"
    while sorted( fStatus ):
      status = fStatus.pop( 0 )
      # # one file Failed -> Failed
      if status == "Failed":
        newStatus = "Failed"
        break
      elif status == "Scheduled":
        # Scheduled only wins over the default Done
        if newStatus == "Done":
          newStatus = "Scheduled"
        continue
      elif status == "Waiting":
        newStatus = "Queued"
        continue
      elif status == "Done":
        continue
    self.__data__["Status"] = newStatus
    if self._parent:
      self._parent._notify()

  def _setQueued( self, caller ):
    """ don't touch """
    # only the parent Request is allowed to force this transition
    if caller == self._parent:
      self.__data__["Status"] = "Queued"

  def _setWaiting( self, caller ):
    """ don't touch as well """
    if caller == self._parent:
      self.__data__["Status"] = "Waiting"

  # # Files arithmetics
  def __contains__( self, opFile ):
    """ in operator """
    return opFile in self.__files__

  def __iadd__( self, opFile ):
    """ += operator """
    if len( self ) >= Operation.MAX_FILES:
      raise RuntimeError( "too many Files in a single Operation" )
    self.addFile( opFile )
    return self

  def addFile( self, opFile ):
    """ add :opFile: to operation

    :raises RuntimeError: when MAX_FILES is exceeded
    """
    if len( self ) > Operation.MAX_FILES:
      raise RuntimeError( "too many Files in a single Operation" )
    if opFile not in self:
      self.__files__.append( opFile )
      opFile._parent = self
    self._notify()

  # # helpers for looping
  def __iter__( self ):
    """ files iterator """
    return self.__files__.__iter__()

  def __getitem__( self, i ):
    """ [] op for opFiles """
    return self.__files__.__getitem__( i )

  def __delitem__( self, i ):
    """ remove file from op, only if OperationID is NOT set """
    if not self.OperationID:
      self.__files__.__delitem__( i )
    else:
      # already persisted: remember the FileID for later DB cleanup
      if self[i].FileID:
        self.__dirty.append( self[i].FileID )
      self.__files__.__delitem__( i )
    self._notify()

  def __setitem__( self, i, opFile ):
    """ overwrite opFile """
    toDelete = self[i]
    if toDelete.FileID:
      self.__dirty.append( toDelete.FileID )
    self.__files__.__setitem__( i, opFile )
    opFile._parent = self
    self._notify()

  def fileStatusList( self ):
    """ get list of files statuses """
    return [ subFile.Status for subFile in self ]

  def __len__( self ):
    """ nb of subFiles """
    return len( self.__files__ )

  # # properties
  @property
  def RequestID( self ):
    """ RequestID getter (RO) """
    return self._parent.RequestID if self._parent else -1

  @RequestID.setter
  def RequestID( self, value ):
    """ can't set RequestID by hand """
    # value is ignored on purpose: RequestID always follows the parent
    self.__data__["RequestID"] = self._parent.RequestID if self._parent else -1

  @property
  def OperationID( self ):
    """ OperationID getter """
    return self.__data__["OperationID"]

  @OperationID.setter
  def OperationID( self, value ):
    """ OperationID setter """
    self.__data__["OperationID"] = long( value ) if value else 0

  @property
  def Type( self ):
    """ operation type prop """
    return self.__data__["Type"]

  @Type.setter
  def Type( self, value ):
    """ operation type setter """
    self.__data__["Type"] = str( value )

  @property
  def Arguments( self ):
    """ arguments getter """
    return self.__data__["Arguments"]

  @Arguments.setter
  def Arguments( self, value ):
    """ arguments setter """
    self.__data__["Arguments"] = value if value else ""

  @property
  def SourceSE( self ):
    """ source SE prop """
    return self.__data__["SourceSE"] if self.__data__["SourceSE"] else ""

  @SourceSE.setter
  def SourceSE( self, value ):
    """ source SE setter """
    value = ",".join( self._uniqueList( value ) )
    if len( value ) > 256:
      raise ValueError( "SourceSE list too long" )
    self.__data__["SourceSE"] = str( value )[:255] if value else ""

  @property
  def sourceSEList( self ):
    """ helper property returning source SEs as a list"""
    return self.SourceSE.split( "," )

  @property
  def TargetSE( self ):
    """ target SE prop """
    return self.__data__["TargetSE"] if self.__data__["TargetSE"] else ""

  @TargetSE.setter
  def TargetSE( self, value ):
    """ target SE setter """
    value = ",".join( self._uniqueList( value ) )
    if len( value ) > 256:
      raise ValueError( "TargetSE list too long" )
    self.__data__["TargetSE"] = value[:255] if value else ""

  @property
  def targetSEList( self ):
    """ helper property returning target SEs as a list"""
    return self.TargetSE.split( "," )

  @property
  def Catalog( self ):
    """ catalog prop """
    return self.__data__["Catalog"]

  @Catalog.setter
  def Catalog( self, value ):
    """ catalog setter """
    value = ",".join( self._uniqueList( value ) )
    if len( value ) > 255:
      raise ValueError( "Catalog list too long" )
    self.__data__["Catalog"] = value if value else ""

  @property
  def catalogList( self ):
    """ helper property returning catalogs as list """
    # NOTE(review): raises AttributeError if Catalog was never set (None) —
    # confirm callers always set Catalog before reading this
    return self.__data__["Catalog"].split( "," )

  @property
  def Error( self ):
    """ error prop """
    return self.__data__["Error"]

  @Error.setter
  def Error( self, value ):
    """ error setter """
    # NOTE(review): strict str check — a unicode Error would be rejected here
    if type( value ) != str:
      raise TypeError( "Error has to be a string!" )
    self.__data__["Error"] = self._escapeStr( value, 255 )

  @property
  def Status( self ):
    """ Status prop """
    return self.__data__["Status"]

  @Status.setter
  def Status( self, value ):
    """ Status setter

    :raises ValueError: for a value outside ALL_STATES
    """
    if value not in Operation.ALL_STATES:
      raise ValueError( "unknown Status '%s'" % str( value ) )
    if self.__files__:
      # with files attached the status is derived, not set directly
      self._notify()
    else:
      self.__data__["Status"] = value
      if self._parent:
        self._parent._notify()

  @property
  def Order( self ):
    """ order prop """
    # derived from the position inside the parent Request, cached in __data__
    if self._parent:
      self.__data__["Order"] = self._parent.indexOf( self ) if self._parent else -1
    return self.__data__["Order"]

  @property
  def CreationTime( self ):
    """ operation creation time prop """
    return self.__data__["CreationTime"]

  @CreationTime.setter
  def CreationTime( self, value = None ):
    """ creation time setter """
    if type( value ) not in ( datetime.datetime, str ):
      raise TypeError( "CreationTime should be a datetime.datetime!" )
    if type( value ) == str:
      # accepted string format is 'YYYY-MM-DD HH:MM:SS' (fraction dropped)
      value = datetime.datetime.strptime( value.split( "." )[0], '%Y-%m-%d %H:%M:%S' )
    self.__data__["CreationTime"] = value

  @property
  def SubmitTime( self ):
    """ subrequest's submit time prop """
    return self.__data__["SubmitTime"]

  @SubmitTime.setter
  def SubmitTime( self, value = None ):
    """ submit time setter """
    if type( value ) not in ( datetime.datetime, str ):
      raise TypeError( "SubmitTime should be a datetime.datetime!" )
    if type( value ) == str:
      value = datetime.datetime.strptime( value.split( "." )[0], '%Y-%m-%d %H:%M:%S' )
    self.__data__["SubmitTime"] = value

  @property
  def LastUpdate( self ):
    """ last update prop """
    return self.__data__["LastUpdate"]

  @LastUpdate.setter
  def LastUpdate( self, value = None ):
    """ last update setter """
    if type( value ) not in ( datetime.datetime, str ):
      raise TypeError( "LastUpdate should be a datetime.datetime!" )
    if type( value ) == str:
      value = datetime.datetime.strptime( value.split( "." )[0], '%Y-%m-%d %H:%M:%S' )
    self.__data__["LastUpdate"] = value

  def __str__( self ):
    """ str operator """
    return str( self.toJSON()["Value"] )

  def toSQL( self ):
    """ get SQL INSERT or UPDATE statement

    :raises AttributeError: when RequestID is not set
    :return: S_OK with the statement string
    """
    if not getattr( self, "RequestID" ):
      raise AttributeError( "RequestID not set" )
    # quote str/datetime values; falsy columns are skipped entirely
    colVals = [ ( "`%s`" % column, "'%s'" % getattr( self, column )
                  if type( getattr( self, column ) ) in ( str, datetime.datetime ) else str( getattr( self, column ) ) )
                for column in self.__data__
                if getattr( self, column ) and column not in ( "OperationID", "LastUpdate", "Order" ) ]
    colVals.append( ( "`LastUpdate`", "UTC_TIMESTAMP()" ) )
    colVals.append( ( "`Order`", str( self.Order ) ) )
    # colVals.append( ( "`Status`", "'%s'" % str(self.Status) ) )
    query = []
    if self.OperationID:
      query.append( "UPDATE `Operation` SET " )
      query.append( ", ".join( [ "%s=%s" % item for item in colVals ] ) )
      query.append( " WHERE `OperationID`=%d;\n" % self.OperationID )
    else:
      query.append( "INSERT INTO `Operation` " )
      columns = "(%s)" % ",".join( [ column for column, value in colVals ] )
      values = "(%s)" % ",".join( [ value for column, value in colVals ] )
      query.append( columns )
      query.append( " VALUES %s;\n" % values )
    return S_OK( "".join( query ) )

  def cleanUpSQL( self ):
    """ query deleting dirty records from File table

    :return: DELETE statement string, or None when there is nothing to clean
    """
    if self.OperationID and self.__dirty:
      fIDs = ",".join( [ str( fid ) for fid in self.__dirty ] )
      return "DELETE FROM `File` WHERE `OperationID` = %s AND `FileID` IN (%s);\n" % ( self.OperationID, fIDs )

  def toJSON( self ):
    """ get json digest """
    digest = dict( zip( self.__data__.keys(),
                        [ str( val ) if val else "" for val in self.__data__.values() ] ) )
    digest["RequestID"] = str( self.RequestID )
    digest["Order"] = str( self.Order )
    if self.__dirty:
      digest["__dirty"] = self.__dirty
    digest["Files"] = []
    for opFile in self:
      opJSON = opFile.toJSON()
      # propagate the child's failure unchanged
      if not opJSON["OK"]:
        return opJSON
      digest["Files"].append( opJSON["Value"] )
    return S_OK( digest )
class FTSJob(Record):
    """ .. class:: FTSJob

    class describing one FTS job
    """
    # NOTE: the class definition continues beyond this chunk (more properties follow).

    # # initial states
    INITSTATES = ("Submitted", "Ready", "Staging")
    # # ongoing transfer states
    TRANSSTATES = ("Active", "Hold")
    # # failed states
    FAILEDSTATES = ("Canceled", "Failed")
    # # finished
    FINALSTATES = ("Finished", "FinishedDirty", "Failed", "Canceled")

    # # missing source regexp patterns
    # used to recognize "source file does not exist" failures in FTS error strings
    missingSourceErrors = [
        re.compile(
            r"SOURCE error during TRANSFER_PREPARATION phase: \[INVALID_PATH\] Failed"
        ),
        re.compile(
            r"SOURCE error during TRANSFER_PREPARATION phase: \[INVALID_PATH\] No such file or directory"
        ),
        re.compile(
            r"SOURCE error during PREPARATION phase: \[INVALID_PATH\] Failed"
        ),
        re.compile(
            r"SOURCE error during PREPARATION phase: \[INVALID_PATH\] The requested file either does not exist"
        ),
        re.compile(
            r"TRANSFER error during TRANSFER phase: \[INVALID_PATH\] the server sent an error response: 500 500"\
            " Command failed. : open error: No such file or directory"
        ),
        re.compile(
            r"SOURCE error during TRANSFER_PREPARATION phase: \[USER_ERROR\] source file doesnt exist"
        )
    ]

    def __init__(self, fromDict=None):
        """c'tor

        :param self: self reference
        :param dict fromDict: data dict
        """
        Record.__init__(self)
        now = datetime.datetime.utcnow().replace(microsecond=0)
        self.__data__["CreationTime"] = now
        self.__data__["SubmitTime"] = now
        self.__data__["LastUpdate"] = now
        self.__data__["Status"] = "Submitted"
        self.__data__["Completeness"] = 0
        self.__data__["FTSJobID"] = 0
        # registration accounting counters (internal, not persisted via __data__)
        self._regTime = 0.
        self._regSuccess = 0
        self._regTotal = 0
        self.__files__ = TypedList(allowedTypes=FTSFile)
        self._fc = FileCatalog()
        self._log = gLogger.getSubLogger("FTSJob-%s" % self.FTSJobID, True)
        # flat tuple of every known state, duplicates removed
        self._states = tuple(
            set(self.INITSTATES + self.TRANSSTATES + self.FAILEDSTATES + self.FINALSTATES))
        fromDict = fromDict if fromDict else {}
        for ftsFileDict in fromDict.get("FTSFiles", []):
            self += FTSFile(ftsFileDict)
        if "FTSFiles" in fromDict:
            del fromDict["FTSFiles"]
        for key, value in fromDict.items():
            if key not in self.__data__:
                raise AttributeError("Unknown FTSJob attribute '%s'" % key)
            if value:
                setattr(self, key, value)

    @staticmethod
    def tableDesc():
        """ get table desc """
        return {
            "Fields" : {
                "FTSJobID" : "INTEGER NOT NULL AUTO_INCREMENT",
                "FTSGUID" : "VARCHAR(64) NOT NULL",
                "OperationID": "INTEGER NOT NULL",
                "RequestID": "INTEGER NOT NULL",
                "SourceSE" : "VARCHAR(128) NOT NULL",
                "TargetSE" : "VARCHAR(128) NOT NULL",
                "FTSServer" : "VARCHAR(255) NOT NULL",
                "TargetToken": "VARCHAR(255)",
                "SourceToken": "VARCHAR(255)",
                "Size": "BIGINT NOT NULL",
                "Files": "INTEGER NOT NULL",
                "Completeness": "INTEGER NOT NULL DEFAULT 0",
                "FailedFiles": "INTEGER DEFAULT 0",
                "FailedSize": "INTEGER DEFAULT 0",
                "Status" : "ENUM( 'Submitted', 'Ready', 'Staging', 'Canceled', 'Active', 'Hold', "\
                           "'Failed', 'Finished', 'FinishedDirty', 'Assigned' ) DEFAULT 'Submitted'",
                "Error" : "VARCHAR(255)",
                "CreationTime" : "DATETIME",
                "SubmitTime" : "DATETIME",
                "LastUpdate" : "DATETIME" },
            "PrimaryKey" : [ "FTSJobID" ],
            "Indexes" : { "FTSJobID" : [ "FTSJobID" ], "FTSGUID": [ "FTSGUID" ] } }

    @property
    def FTSJobID(self):
        """ FTSJobID getter """
        return self.__data__["FTSJobID"]

    @FTSJobID.setter
    def FTSJobID(self, value):
        """ FTSJobID setter """
        self.__data__["FTSJobID"] = long(value) if value else 0

    @property
    def RequestID(self):
        """ RequestID getter """
        return self.__data__["RequestID"]

    @RequestID.setter
    def RequestID(self, value):
        """ RequestID setter """
        self.__data__["RequestID"] = long(value) if value else 0
@property def OperationID(self): """ OperationID getter """ return self.__data__["OperationID"] @OperationID.setter def OperationID(self, value): """ OperationID setter """ self.__data__["OperationID"] = long(value) if value else 0 @property def FTSGUID(self): """ FTSGUID prop """ return self.__data__["FTSGUID"] @FTSGUID.setter def FTSGUID(self, value): """ FTSGUID setter """ if value: if type(value) not in (str, unicode): raise TypeError("FTSGUID should be a string!") if not checkGuid(value): raise ValueError("'%s' is not a valid GUID!" % str(value)) self.__data__["FTSGUID"] = value @property def FTSServer(self): """ FTSServer getter """ return self.__data__["FTSServer"] @FTSServer.setter def FTSServer(self, url): """ FTSServer getter """ self.__data__["FTSServer"] = url @property def Completeness(self): """ completeness getter """ return self.__data__["Completeness"] @Completeness.setter def Completeness(self, value): """ completeness setter """ self.__data__["Completeness"] = int(value) if value else 0 @property def Error(self): """ error getter """ return self.__data__["Error"] @Error.setter def Error(self, error): """ error setter """ self.__data__["Error"] = str(error)[255:] @property def Files(self): """ nb files getter """ self.__data__["Files"] = len(self) return self.__data__["Files"] @Files.setter def Files(self, value): """ nb files setter """ self.__data__["Files"] = len(self) @property def Status(self): """ status prop """ if not self.__data__["Status"]: self.__data__["Status"] = "Waiting" return self.__data__["Status"] @Status.setter def Status(self, value): """ status setter """ value = self._normalizedStatus(value.strip()) if value not in self._states: raise ValueError("Unknown FTSJob Status: '%s'" % str(value)) self.__data__["Status"] = value @property def FailedFiles(self): """ nb failed files getter """ self.__data__["FailedFiles"] = len([ ftsFile for ftsFile in self if ftsFile.Status in FTSFile.FAILED_STATES ]) return 
self.__data__["FailedFiles"] @FailedFiles.setter def FailedFiles(self, value): """ nb failed files setter """ if value: self.__data__["FailedFiles"] = value else: self.__data__["FailedFiles"] = sum([ ftsFile for ftsFile in self if ftsFile.Status in FTSFile.FAILED_STATES ]) @property def Size(self): """ size getter """ # if not self.__data__["Size"]: self.__data__["Size"] = sum([ftsFile.Size for ftsFile in self]) return self.__data__["Size"] @Size.setter def Size(self, value): """ size setter """ if value: self.__data__["Size"] = value else: self.__data__["Size"] = sum([ftsFile.Size for ftsFile in self]) @property def FailedSize(self): """ size getter """ if not self.__data__["FailedSize"]: self.__data__["FailedSize"] = sum([ ftsFile.Size for ftsFile in self if ftsFile.Status in FTSFile.FAILED_STATES ]) return self.__data__["FailedSize"] @FailedSize.setter def FailedSize(self, value): """ size setter """ if value: self.__data__["FailedSize"] = value else: self.__data__["FailedSize"] = sum([ ftsFile.Size for ftsFile in self if ftsFile.Status in FTSFile.FAILED_STATES ]) @property def CreationTime(self): """ creation time getter """ return self.__data__["CreationTime"] @CreationTime.setter def CreationTime(self, value=None): """ creation time setter """ if type(value) not in (datetime.datetime, str): raise TypeError("CreationTime should be a datetime.datetime!") if type(value) == str: value = datetime.datetime.strptime( value.split(".")[0], '%Y-%m-%d %H:%M:%S') self.__data__["CreationTime"] = value @property def SubmitTime(self): """ request's submission time getter """ return self.__data__["SubmitTime"] @SubmitTime.setter def SubmitTime(self, value=None): """ submission time setter """ if type(value) not in (datetime.datetime, str): raise TypeError("SubmitTime should be a datetime.datetime!") if type(value) == str: value = datetime.datetime.strptime( value.split(".")[0], '%Y-%m-%d %H:%M:%S') self.__data__["SubmitTime"] = value @property def LastUpdate(self): """ last 
update getter """ return self.__data__["LastUpdate"] @LastUpdate.setter def LastUpdate(self, value=None): """ last update setter """ if type(value) not in (datetime.datetime, str): raise TypeError("LastUpdate should be a datetime.datetime!") if type(value) == str: value = datetime.datetime.strptime( value.split(".")[0], '%Y-%m-%d %H:%M:%S') self.__data__["LastUpdate"] = value @property def TargetSE(self): """ target SE getter """ return self.__data__["TargetSE"] @TargetSE.setter def TargetSE(self, targetSE): """ target SE setter """ self.__data__["TargetSE"] = targetSE @property def SourceSE(self): """ source SE getter """ return self.__data__["SourceSE"] @SourceSE.setter def SourceSE(self, sourceSE): """ source SE setter """ self.__data__["SourceSE"] = sourceSE @property def SourceToken(self): """ source token getter """ return self.__data__["SourceToken"] @SourceToken.setter def SourceToken(self, sourceToken): """ source SE setter """ self.__data__["SourceToken"] = sourceToken @property def TargetToken(self): """ target token getter """ return self.__data__["TargetToken"] @TargetToken.setter def TargetToken(self, targetToken): """ target SE setter """ self.__data__["TargetToken"] = targetToken # # FTSJobFiles arithmetics def __contains__(self, subFile): """ in operator """ return subFile in self.__files__ def __iadd__(self, ftsFile): """ += operator """ if ftsFile not in self: self.__files__.append(ftsFile) ftsFile._parent = self self.Files self.Size return self def __add__(self, ftsFile): """ + operator """ self += ftsFile def addFile(self, ftsFile): """ add :ftsFile: to FTS job """ self += ftsFile def subFile(self, ftsFile): """ remove ftsFile from this job """ if ftsFile in self: ftsFile._parent = None self.__files__.remove(ftsFile) # # helpers for looping def __iter__(self): """ files iterator """ return self.__files__.__iter__() def __getitem__(self, i): """ [] op for files """ return self.__files__.__getitem__(i) def __delitem__(self, i): """ del ftsJob[i] 
""" self.__files__.__delitem__(i) def __setitem__(self, i, ftsFile): """ ftsJob[i] = ftsFile """ self.__files__.__setitem__(i, ftsFile) def fileStatusList(self): """ get list of files statuses """ return [ftsFile.Status for ftsFile in self] def __nonzero__(self): """ for comparisons """ return True def __len__(self): """ nb of subFiles """ return len(self.__files__) def _surlPairs(self): """ create and return SURL pair file """ surls = [] for ftsFile in self: checksum = "%s:%s" % ( ftsFile.ChecksumType, ftsFile.Checksum ) if ftsFile.ChecksumType and ftsFile.Checksum else "" surls.append("%s %s %s" % (ftsFile.SourceSURL, ftsFile.TargetSURL, checksum)) return "\n".join(surls) def submitFTS2(self, command='glite-transfer-submit', pinTime=False): """ submit fts job using FTS2 client """ if self.FTSGUID: return S_ERROR("FTSJob has already been submitted") surls = self._surlPairs() if not surls: return S_ERROR("No files to submit") fd, fileName = tempfile.mkstemp() surlFile = os.fdopen(fd, 'w') surlFile.write(surls) surlFile.close() submitCommand = command.split() + \ [ "-s", self.FTSServer, "-f", fileName, "-o", "-K" ] if self.TargetToken: submitCommand += ["-t", self.TargetToken] if self.SourceToken: submitCommand += ["-S", self.SourceToken] if pinTime: submitCommand += [ "--copy-pin-lifetime", "%d" % pinTime, "--bring-online", '86400' ] submit = executeGridCommand("", submitCommand) os.remove(fileName) if not submit["OK"]: return submit returnCode, output, errStr = submit["Value"] if returnCode != 0: return S_ERROR(errStr if errStr else output) self.FTSGUID = output.replace("\n", "") self.Status = "Submitted" for ftsFile in self: ftsFile.FTSGUID = self.FTSGUID ftsFile.Status = "Submitted" return S_OK() def _normalizedStatus(self, status): for st in self._states: if status.lower() == st.lower(): return st return status def monitorFTS2(self, command="glite-transfer-status", full=False): """ monitor fts job """ if not self.FTSGUID: return S_ERROR("FTSGUID not set, FTS 
job not submitted?") monitorCommand = command.split() + \ ["--verbose", "-s", self.FTSServer, self.FTSGUID ] if full: monitorCommand.append("-l") monitor = executeGridCommand("", monitorCommand) if not monitor["OK"]: return monitor returnCode, outputStr, errStr = monitor["Value"] # Returns a non zero status if error if returnCode != 0: if 'was not found' in outputStr and not errStr: errStr = 'Job was not found' return S_ERROR(errStr) outputStr = outputStr.replace("'", "").replace("<", "").replace(">", "") # # set FTS job status regExp = re.compile("Status:\\s+(\\S+)") # with FTS3 this can be uppercase self.Status = re.search(regExp, outputStr).group(1) statusSummary = {} # This is capitalized, even in FTS3! for state in FTSFile.ALL_STATES: regExp = re.compile("\\s+%s:\\s+(\\d+)" % state) if regExp.search(outputStr): statusSummary[state] = int( re.search(regExp, outputStr).group(1)) total = sum(statusSummary.values()) completed = sum( [statusSummary.get(state, 0) for state in FTSFile.FINAL_STATES]) self.Completeness = 100 * completed / total if total else 0 if not full: return S_OK(statusSummary) # The order of informations is not the same for glite- and fts- !!! 
# In order: new fts-, old fts-, glite- iExptr = None for iExptr, exptr in enumerate( ('[ ]+Source:[ ]+(\\S+)\n[ ]+Destination:[ ]+(\\S+)\n[ ]+State:[ ]+(\\S+)\n[ ]+Reason:[ ]+([\\S ]+).+?[ ]+Duration:[ ]+(\\d+)\n[ ]+Staging:[ ]+(\\d+)\n[ ]+Retries:[ ]+(\\d+)', '[ ]+Source:[ ]+(\\S+)\n[ ]+Destination:[ ]+(\\S+)\n[ ]+State:[ ]+(\\S+)\n[ ]+Reason:[ ]+([\\S ]+).+?[ ]+Duration:[ ]+(\\d+)\n[ ]+Retries:[ ]+(\\d+)', '[ ]+Source:[ ]+(\\S+)\n[ ]+Destination:[ ]+(\\S+)\n[ ]+State:[ ]+(\\S+)\n[ ]+Retries:[ ]+(\\d+)\n[ ]+Reason:[ ]+([\\S ]+).+?[ ]+Duration:[ ]+(\\d+)' )): regExp = re.compile(exptr, re.S) fileInfo = re.findall(regExp, outputStr) if fileInfo: break if not fileInfo: return S_ERROR("Error monitoring job (no regexp match)") for info in fileInfo: if iExptr == 0: # version >= 3.2.30 sourceURL, targetURL, fileStatus, reason, duration, _retries, _staging = info elif iExptr == 1: # version FTS3 < 3.2.30 sourceURL, targetURL, fileStatus, reason, duration, _retries = info elif iExptr == 2: # version FTS2 sourceURL, targetURL, fileStatus, _retries, reason, duration = info else: return S_ERROR('Error monitoring job (implement match %d)' % iExptr) candidateFile = None for ftsFile in self: if ftsFile.SourceSURL == sourceURL: candidateFile = ftsFile break if not candidateFile: continue # Can be uppercase for FTS3 if not candidateFile.TargetSURL: candidateFile.TargetSURL = targetURL candidateFile.Status = fileStatus candidateFile.Error = reason candidateFile._duration = duration if candidateFile.Status == "Failed": for missingSource in self.missingSourceErrors: if missingSource.match(reason): candidateFile.Error = "MissingSource" # If the staging info was present, record it if len(info) > 6: candidateFile._staging = info[6] # # register successful files if self.Status in FTSJob.FINALSTATES: return self.finalize() return S_OK() def finalize(self): """ register successfully transferred files """ if self.Status not in FTSJob.FINALSTATES: return S_OK() startTime = time.time() 
targetSE = StorageElement(self.TargetSE) toRegister = [ ftsFile for ftsFile in self if ftsFile.Status == "Finished" ] toRegisterDict = {} for ftsFile in toRegister: pfn = returnSingleResult( targetSE.getPfnForProtocol(ftsFile.TargetSURL, protocol="SRM2", withPort=False)) if not pfn["OK"]: continue pfn = pfn["Value"] toRegisterDict[ftsFile.LFN] = {"PFN": pfn, "SE": self.TargetSE} if toRegisterDict: self._regTotal += len(toRegisterDict) register = self._fc.addReplica(toRegisterDict) self._regTime += time.time() - startTime if not register["OK"]: for ftsFile in toRegister: ftsFile.Error = "AddCatalogReplicaFailed" return register register = register["Value"] self._regSuccess += len(register.get('Successful', {})) failedFiles = register.get("Failed", {}) for ftsFile in toRegister: if ftsFile.LFN in failedFiles: ftsFile.Error = "AddCatalogReplicaFailed" return S_OK() def toSQL(self): """ prepare SQL INSERT or UPDATE statement :return: str with SQL fragment """ colVals = [ ("`%s`" % column, "'%s'" % value if type(value) in (str, datetime.datetime) else str(value)) for column, value in self.__data__.items() if value and column not in ("FTSJobID", "LastUpdate") ] colVals.append(("`LastUpdate`", "UTC_TIMESTAMP()")) query = [] if self.FTSJobID: query.append("UPDATE `FTSJob` SET ") query.append(",".join(["%s=%s" % item for item in colVals])) query.append(" WHERE `FTSJobID`=%d;\n" % self.FTSJobID) else: query.append("INSERT INTO `FTSJob` ") columns = "(%s)" % ",".join([column for column, value in colVals]) values = "(%s)" % ",".join([value for column, value in colVals]) query.append(columns) query.append(" VALUES %s;" % values) return S_OK("".join(query)) def toJSON(self): """ dump to JSON format """ digest = dict( zip(self.__data__.keys(), [str(val) if val else "" for val in self.__data__.values()])) digest["FTSFiles"] = [] for ftsFile in self: fileJSON = ftsFile.toJSON() if not fileJSON["OK"]: return fileJSON digest["FTSFiles"].append(fileJSON["Value"]) return S_OK(digest)
class Request( Record ):
  """
  .. class:: Request

  :param int RequestID: requestID
  :param str Name: request' name
  :param str OwnerDN: request's owner DN
  :param str OwnerGroup: request owner group
  :param str Setup: DIRAC setup
  :param str SourceComponent: whatever
  :param int JobID: jobID
  :param datetime.datetime CreationTime: UTC datetime
  :param datetime.datetime SubmissionTime: UTC datetime
  :param datetime.datetime LastUpdate: UTC datetime
  :param str Status: request's status
  :param TypedList operations: list of operations
  """

  ALL_STATES = ( "Waiting", "Failed", "Done", "Scheduled", "Assigned", "Canceled" )
  FINAL_STATES = ( "Done", "Failed", "Canceled" )

  def __init__( self, fromDict = None ):
    """c'tor

    :param self: self reference
    """
    Record.__init__( self )
    self.__waiting = None
    now = datetime.datetime.utcnow().replace( microsecond = 0 )
    self.__data__["CreationTime"] = now
    self.__data__["SubmitTime"] = now
    self.__data__["LastUpdate"] = now
    # a fresh empty request is Waiting -- "Done" is a final state and
    # _notify() (called below) recomputes "Waiting" for an empty request anyway
    self.__data__["Status"] = "Waiting"
    self.__data__["JobID"] = 0
    self.__data__["RequestID"] = 0
    proxyInfo = getProxyInfo()
    if proxyInfo["OK"]:
      proxyInfo = proxyInfo["Value"]
      if proxyInfo["validGroup"] and proxyInfo["validDN"]:
        self.OwnerDN = proxyInfo["identity"]
        self.OwnerGroup = proxyInfo["group"]
    self.__dirty = []
    self.__operations__ = TypedList( allowedTypes = Operation )
    fromDict = fromDict if fromDict else {}
    self.__dirty = fromDict.get( "__dirty", [] )
    if "__dirty" in fromDict:
      del fromDict["__dirty"]
    for opDict in fromDict.get( "Operations", [] ):
      self += Operation( opDict )
    if "Operations" in fromDict:
      del fromDict["Operations"]
    for key, value in fromDict.items():
      if key not in self.__data__:
        raise AttributeError( "Unknown Request attribute '%s'" % key )
      if value:
        setattr( self, key, value )
    self._notify()

  @staticmethod
  def tableDesc():
    """ get table desc """
    return { "Fields" :
             { "RequestID" : "INTEGER NOT NULL AUTO_INCREMENT",
               "RequestName" : "VARCHAR(255) NOT NULL",
               "OwnerDN" : "VARCHAR(255)",
               "OwnerGroup" : "VARCHAR(32)",
               "Status" : "ENUM('Waiting', 'Assigned', 'Done', 'Failed', 'Canceled', 'Scheduled') DEFAULT 'Waiting'",
               "Error" : "VARCHAR(255)",
               "DIRACSetup" : "VARCHAR(32)",
               "SourceComponent" : "BLOB",
               "JobID" : "INTEGER DEFAULT 0",
               "CreationTime" : "DATETIME",
               "SubmitTime" : "DATETIME",
               "LastUpdate" : "DATETIME" },
             "PrimaryKey" : [ "RequestID", "RequestName" ],
             "Indexes" : { "RequestName" : [ "RequestName" ] } }

  def _notify( self ):
    """ simple state machine for sub request statuses """
    self.__waiting = None
    # # update operations statuses
    rStatus = "Waiting"
    opStatusList = [ ( op.Status, op ) for op in self ]
    self.__waiting = None
    isScheduled = False
    isWaiting = False
    while opStatusList:
      opStatus, op = opStatusList.pop( 0 )
      # # Failed -> Failed
      if opStatus == "Failed":
        rStatus = "Failed"
        break
      # Scheduled -> Scheduled
      if opStatus == "Scheduled":
        if not isWaiting:
          rStatus = "Scheduled"
          self.__waiting = op
          isScheduled = True
        continue
      if opStatus == "Queued":
        if isScheduled or isWaiting:
          continue
        else:
          # not isWaiting:
          op._setWaiting( self )
          self.__waiting = op
          rStatus = "Waiting"
          isWaiting = True
      if opStatus == "Waiting":
        if isScheduled or isWaiting:
          op._setQueued( self )
          rStatus = "Waiting"
        else:
          self.__waiting = op
          isWaiting = True
          rStatus = "Waiting"
      if opStatus == "Done":
        if isScheduled or isWaiting:
          continue
        else:
          rStatus = "Done"
    self.Status = rStatus

  def getWaiting( self ):
    """ get waiting operation if any """
    # # update states
    self._notify()
    return S_OK( self.__waiting )

  # # Operation arithmetics
  def __contains__( self, operation ):
    """ in operator

    :param self: self reference
    :param Operation subRequest: a subRequest
    """
    return bool( operation in self.__operations__ )

  def __iadd__( self, operation ):
    """ += operator for subRequest

    :param self: self reference
    :param Operation operation: sub-request to add
    """
    if operation not in self:
      self.__operations__.append( operation )
      operation._parent = self
      self._notify()
    return self

  def insertBefore( self, newOperation, existingOperation ):
    """ insert :newOperation: just before :existingOperation:

    :param self: self reference
    :param Operation newOperation: Operation to be inserted
    :param Operation existingOperation: previous Operation sibling
    """
    if existingOperation not in self:
      return S_ERROR( "%s is not in" % existingOperation )
    if newOperation in self:
      return S_ERROR( "%s is already in" % newOperation )
    self.__operations__.insert( self.__operations__.index( existingOperation ), newOperation )
    newOperation._parent = self
    self._notify()
    return S_OK()

  def insertAfter( self, newOperation, existingOperation ):
    """ insert :newOperation: just after :existingOperation:

    :param self: self reference
    :param Operation newOperation: Operation to be inserted
    :param Operation existingOperation: next Operation sibling
    """
    if existingOperation not in self:
      return S_ERROR( "%s is not in" % existingOperation )
    if newOperation in self:
      return S_ERROR( "%s is already in" % newOperation )
    self.__operations__.insert( self.__operations__.index( existingOperation ) + 1, newOperation )
    newOperation._parent = self
    self._notify()
    return S_OK()

  def addOperation( self, operation ):
    """ add :operation: to list of Operations

    :param self: self reference
    :param Operation operation: Operation to be inserted
    """
    if operation in self:
      return S_ERROR( "This operation is already in!!!" )
    self += operation
    return S_OK()

  def __iter__( self ):
    """ iterator for sub-request """
    return self.__operations__.__iter__()

  def __getitem__( self, i ):
    """ [] op for sub requests """
    return self.__operations__.__getitem__( i )

  def __setitem__( self, i, value ):
    """ self[i] = val """
    if self[i].OperationID:
      self.__dirty.append( self[i].OperationID )
    self.__operations__.__setitem__( i, value )
    value._parent = self
    self._notify()

  def __delitem__( self, i ):
    """ del self[i]"""
    if not self.RequestID:
      self.__operations__.__delitem__( i )
    else:
      opId = self[i].OperationID
      if opId:
        self.__dirty.append( opId )
      self.__operations__.__delitem__( i )
    self._notify()

  def indexOf( self, subReq ):
    """ return index of subReq (execution order) """
    return self.__operations__.index( subReq ) if subReq in self else -1

  def __len__( self ):
    """ nb of subRequests """
    return len( self.__operations__ )

  def subStatusList( self ):
    """ list of statuses for all operations """
    return [ subReq.Status for subReq in self ]

  # # properties

  @property
  def RequestID( self ):
    """ request ID getter """
    return self.__data__["RequestID"]

  @RequestID.setter
  def RequestID( self, value ):
    """ requestID setter (shouldn't be RO???) """
    self.__data__["RequestID"] = long( value ) if value else 0

  @property
  def RequestName( self ):
    """ request's name getter """
    return self.__data__["RequestName"]

  @RequestName.setter
  def RequestName( self, value ):
    """ request name setter """
    if type( value ) != str:
      raise TypeError( "RequestName should be a string" )
    self.__data__["RequestName"] = value[:128]

  @property
  def OwnerDN( self ):
    """ request owner DN getter """
    return self.__data__["OwnerDN"]

  @OwnerDN.setter
  def OwnerDN( self, value ):
    """ request owner DN setter """
    if type( value ) != str:
      raise TypeError( "OwnerDN should be a string!" )
    self.__data__["OwnerDN"] = value

  @property
  def OwnerGroup( self ):
    """ request owner group getter """
    return self.__data__["OwnerGroup"]

  @OwnerGroup.setter
  def OwnerGroup( self, value ):
    """ request owner group setter """
    if type( value ) != str:
      raise TypeError( "OwnerGroup should be a string!" )
    self.__data__["OwnerGroup"] = value

  @property
  def DIRACSetup( self ):
    """ DIRAC setup getter """
    return self.__data__["DIRACSetup"]

  @DIRACSetup.setter
  def DIRACSetup( self, value ):
    """ DIRAC setup setter """
    if type( value ) != str:
      raise TypeError( "setup should be a string!" )
    self.__data__["DIRACSetup"] = value

  @property
  def SourceComponent( self ):
    """ source component getter """
    return self.__data__["SourceComponent"]

  @SourceComponent.setter
  def SourceComponent( self, value ):
    """ source component setter """
    if type( value ) != str:
      # error message named the wrong attribute ("Setup") before
      raise TypeError( "SourceComponent should be a string!" )
    self.__data__["SourceComponent"] = value

  @property
  def JobID( self ):
    """ jobID getter """
    return self.__data__["JobID"]

  @JobID.setter
  def JobID( self, value = 0 ):
    """ jobID setter """
    self.__data__["JobID"] = long( value ) if value else 0

  @property
  def CreationTime( self ):
    """ creation time getter """
    return self.__data__["CreationTime"]

  @CreationTime.setter
  def CreationTime( self, value = None ):
    """ creation time setter """
    if type( value ) not in ( datetime.datetime, str ) :
      raise TypeError( "CreationTime should be a datetime.datetime!" )
    if type( value ) == str:
      value = datetime.datetime.strptime( value.split( "." )[0], '%Y-%m-%d %H:%M:%S' )
    self.__data__["CreationTime"] = value

  @property
  def SubmitTime( self ):
    """ request's submission time getter """
    return self.__data__["SubmitTime"]

  @SubmitTime.setter
  def SubmitTime( self, value = None ):
    """ submission time setter """
    if type( value ) not in ( datetime.datetime, str ):
      raise TypeError( "SubmitTime should be a datetime.datetime!" )
    if type( value ) == str:
      value = datetime.datetime.strptime( value.split( "." )[0], '%Y-%m-%d %H:%M:%S' )
    self.__data__["SubmitTime"] = value

  @property
  def LastUpdate( self ):
    """ last update getter """
    return self.__data__["LastUpdate"]

  @LastUpdate.setter
  def LastUpdate( self, value = None ):
    """ last update setter """
    if type( value ) not in ( datetime.datetime, str ):
      raise TypeError( "LastUpdate should be a datetime.datetime!" )
    if type( value ) == str:
      value = datetime.datetime.strptime( value.split( "." )[0], '%Y-%m-%d %H:%M:%S' )
    self.__data__["LastUpdate"] = value

  @property
  def Status( self ):
    """ status getter """
    self._notify()
    return self.__data__["Status"]

  @Status.setter
  def Status( self, value ):
    """ status setter """
    if value not in Request.ALL_STATES:
      raise ValueError( "Unknown status: %s" % str( value ) )
    self.__data__["Status"] = value

  @property
  def Order( self ):
    """ ro execution order getter """
    self._notify()
    opStatuses = [ op.Status for op in self.__operations__ ]
    return opStatuses.index( "Waiting" ) if "Waiting" in opStatuses else len( opStatuses )

  @property
  def Error( self ):
    """ error getter """
    return self.__data__["Error"]

  @Error.setter
  def Error( self, value ):
    """ error setter """
    if type( value ) != str:
      raise TypeError( "Error has to be a string!" )
    self.__data__["Error"] = self._escapeStr( value, 255 )

  def toSQL( self ):
    """ prepare SQL INSERT or UPDATE statement """
    colVals = [ ( "`%s`" % column,
                  "'%s'" % value if type( value ) in ( str, datetime.datetime ) else str( value ) )
                for column, value in self.__data__.items()
                if value and column not in ( "RequestID", "LastUpdate" ) ]
    colVals.append( ( "`LastUpdate`", "UTC_TIMESTAMP()" ) )
    query = []
    if self.RequestID:
      query.append( "UPDATE `Request` SET " )
      query.append( ", ".join( [ "%s=%s" % item for item in colVals ] ) )
      query.append( " WHERE `RequestID`=%d;\n" % self.RequestID )
    else:
      query.append( "INSERT INTO `Request` " )
      columns = "(%s)" % ",".join( [ column for column, value in colVals ] )
      values = "(%s)" % ",".join( [ value for column, value in colVals ] )
      query.append( columns )
      query.append( " VALUES %s;" % values )
    return S_OK( "".join( query ) )

  def cleanUpSQL( self ):
    """ delete query for dirty operations """
    query = []
    if self.RequestID and self.__dirty:
      opIDs = ",".join( [ str( opID ) for opID in self.__dirty ] )
      query.append( "DELETE FROM `Operation` WHERE `RequestID`=%s AND `OperationID` IN (%s);\n" % ( self.RequestID, opIDs ) )
      for opID in self.__dirty:
        query.append( "DELETE FROM `File` WHERE `OperationID`=%s;\n" % opID )
    return query

  # # digest
  def toJSON( self ):
    """ serialize to JSON format """
    digest = dict( zip( self.__data__.keys(),
                        [ str( val ) if val else "" for val in self.__data__.values() ] ) )
    digest["RequestID"] = self.RequestID
    digest["Operations"] = []
    digest["__dirty"] = self.__dirty
    for op in self:
      opJSON = op.toJSON()
      if not opJSON["OK"]:
        return opJSON
      digest["Operations"].append( opJSON["Value"] )
    return S_OK( digest )

  def getDigest( self ):
    """ return digest for request """
    digest = []
    for op in self:
      opDigest = [ str( item ) for item in ( op.Type, op.Type, op.Status, op.Order ) ]
      if op.TargetSE:
        opDigest.append( op.TargetSE )
      if op.Catalog:
        opDigest.append( op.Catalog )
      if len( op ):
        opFile = op[0]
        opDigest.append( opFile.LFN )
        opDigest.append( ",...<%d files>" % len( op ) )
      digest.append( ":".join( opDigest ) )
    return S_OK( "\n".join( digest ) )
class FTSJob( object ): """ class describing one FTS job """ # # initial states INITSTATES = ( "Submitted", "Ready", "Staging" ) # # ongoing transfer states TRANSSTATES = ( "Active", "Hold" ) # # failed states FAILEDSTATES = ( "Canceled", "Failed" ) # # finished (careful, must be capitalized) FINALSTATES = ( "Finished", "Finisheddirty", "FinishedDirty", "Failed", "Canceled" ) # # missing source regexp patterns missingSourceErrors = [ re.compile( r".*INVALID_PATH\] Failed" ), re.compile( r".*INVALID_PATH\] No such file or directory" ), re.compile( r".*INVALID_PATH\] The requested file either does not exist" ), re.compile( r".*INVALID_PATH\] the server sent an error response: 500 500"\ " Command failed. : open error: No such file or directory" ), re.compile( r"SOURCE error during TRANSFER_PREPARATION phase: \[USER_ERROR\] source file doesnt exist" ) ] def __init__( self, fromDict = None ): """c'tor :param self: self reference :param dict fromDict: data dict """ self.__data__ = dict.fromkeys( self.tableDesc()["Fields"].keys(), None ) now = datetime.datetime.utcnow().replace( microsecond = 0 ) self.__data__["CreationTime"] = now self.__data__["SubmitTime"] = now self.__data__["LastUpdate"] = now self.__data__["Status"] = "Submitted" self.__data__["Completeness"] = 0 self.__data__["FTSJobID"] = 0 self._regTime = 0. 
self._regSuccess = 0 self._regTotal = 0 self.__files__ = TypedList( allowedTypes = FTSFile ) self._fc = FileCatalog() self._states = tuple( set( self.INITSTATES + self.TRANSSTATES + self.FAILEDSTATES + self.FINALSTATES ) ) fromDict = fromDict if fromDict else {} for ftsFileDict in fromDict.get( "FTSFiles", [] ): self +=FTSFile( ftsFileDict ) if "FTSFiles" in fromDict: del fromDict["FTSFiles"] for key, value in fromDict.items(): if key not in self.__data__: raise AttributeError( "Unknown FTSJob attribute '%s'" % key ) if value: setattr( self, key, value ) self._log = gLogger.getSubLogger( "req_%s/FTSJob-%s" % ( self.RequestID, self.FTSGUID ) , True ) @staticmethod def tableDesc(): """ get table desc """ return { "Fields" : { "FTSJobID" : "INTEGER NOT NULL AUTO_INCREMENT", "FTSGUID" : "VARCHAR(64) NOT NULL", "OperationID": "INTEGER NOT NULL", "RequestID": "INTEGER NOT NULL", "SourceSE" : "VARCHAR(128) NOT NULL", "TargetSE" : "VARCHAR(128) NOT NULL", "FTSServer" : "VARCHAR(255) NOT NULL", "TargetToken": "VARCHAR(255)", "SourceToken": "VARCHAR(255)", "Size": "BIGINT NOT NULL", "Files": "INTEGER NOT NULL", "Completeness": "INTEGER NOT NULL DEFAULT 0", "FailedFiles": "INTEGER DEFAULT 0", "FailedSize": "INTEGER DEFAULT 0", "Status" : "ENUM( 'Submitted', 'Ready', 'Staging', 'Canceled', 'Active', 'Hold', "\ "'Failed', 'Finished', 'FinishedDirty', 'Assigned' ) DEFAULT 'Submitted'", "Error" : "VARCHAR(255)", "CreationTime" : "DATETIME", "SubmitTime" : "DATETIME", "LastUpdate" : "DATETIME" }, "PrimaryKey" : [ "FTSJobID" ], "Indexes" : { "FTSJobID" : [ "FTSJobID" ], "FTSGUID": [ "FTSGUID" ] } } @property def FTSJobID( self ): """ FTSJobID getter """ return self.__data__["FTSJobID"] @FTSJobID.setter def FTSJobID( self, value ): """ FTSJobID setter """ self.__data__["FTSJobID"] = long( value ) if value else 0 @property def RequestID( self ): """ RequestID getter """ return self.__data__["RequestID"] @RequestID.setter def RequestID( self, value ): """ RequestID setter """ 
self.__data__["RequestID"] = long( value ) if value else 0 @property def OperationID( self ): """ OperationID getter """ return self.__data__["OperationID"] @OperationID.setter def OperationID( self, value ): """ OperationID setter """ self.__data__["OperationID"] = long( value ) if value else 0 @property def FTSGUID( self ): """ FTSGUID prop """ return self.__data__["FTSGUID"] @FTSGUID.setter def FTSGUID( self, value ): """ FTSGUID setter """ if value: if type( value ) not in ( str, unicode ): raise TypeError( "FTSGUID should be a string!" ) if not checkGuid( value ): raise ValueError( "'%s' is not a valid GUID!" % str( value ) ) self.__data__["FTSGUID"] = value @property def FTSServer( self ): """ FTSServer getter """ return self.__data__["FTSServer"] @FTSServer.setter def FTSServer( self, url ): """ FTSServer getter """ self.__data__["FTSServer"] = url @property def Completeness( self ): """ completeness getter """ return self.__data__["Completeness"] @Completeness.setter def Completeness( self, value ): """ completeness setter """ self.__data__["Completeness"] = int( value ) if value else 0 @property def Error( self ): """ error getter """ return self.__data__["Error"] @Error.setter def Error( self, error ): """ error setter """ self.__data__["Error"] = str( error )[255:] @property def Files( self ): """ nb files getter """ self.__data__["Files"] = len( self ) return self.__data__["Files"] @Files.setter def Files( self, value ): """ nb files setter """ self.__data__["Files"] = len( self ) @property def Status( self ): """ status prop """ if not self.__data__["Status"]: self.__data__["Status"] = "Waiting" return self.__data__["Status"] @Status.setter def Status( self, value ): """ status setter """ value = self._normalizedStatus( value.strip() ) if value not in self._states: raise ValueError( "Unknown FTSJob Status: '%s'" % str( value ) ) self.__data__["Status"] = value @property def FailedFiles( self ): """ nb failed files getter """ 
self.__data__["FailedFiles"] = len( [ ftsFile for ftsFile in self if ftsFile.Status in FTSFile.FAILED_STATES ] ) return self.__data__["FailedFiles"] @FailedFiles.setter def FailedFiles( self, value ): """ nb failed files setter """ if value: self.__data__["FailedFiles"] = value else: self.__data__["FailedFiles"] = sum( [ ftsFile for ftsFile in self if ftsFile.Status in FTSFile.FAILED_STATES ] ) @property def Size( self ): """ size getter """ # if not self.__data__["Size"]: self.__data__["Size"] = sum( [ ftsFile.Size for ftsFile in self ] ) return self.__data__["Size"] @Size.setter def Size( self, value ): """ size setter """ if value: self.__data__["Size"] = value else: self.__data__["Size"] = sum( [ ftsFile.Size for ftsFile in self ] ) @property def FailedSize( self ): """ size getter """ if not self.__data__["FailedSize"]: self.__data__["FailedSize"] = sum( [ ftsFile.Size for ftsFile in self if ftsFile.Status in FTSFile.FAILED_STATES ] ) return self.__data__["FailedSize"] @FailedSize.setter def FailedSize( self, value ): """ size setter """ if value: self.__data__["FailedSize"] = value else: self.__data__["FailedSize"] = sum( [ ftsFile.Size for ftsFile in self if ftsFile.Status in FTSFile.FAILED_STATES ] ) @property def CreationTime( self ): """ creation time getter """ return self.__data__["CreationTime"] @CreationTime.setter def CreationTime( self, value = None ): """ creation time setter """ if type( value ) not in ( datetime.datetime, str ) : raise TypeError( "CreationTime should be a datetime.datetime!" ) if type( value ) == str: value = datetime.datetime.strptime( value.split( "." )[0], '%Y-%m-%d %H:%M:%S' ) self.__data__["CreationTime"] = value @property def SubmitTime( self ): """ request's submission time getter """ return self.__data__["SubmitTime"] @SubmitTime.setter def SubmitTime( self, value = None ): """ submission time setter """ if type( value ) not in ( datetime.datetime, str ): raise TypeError( "SubmitTime should be a datetime.datetime!" 
) if type( value ) == str: value = datetime.datetime.strptime( value.split( "." )[0], '%Y-%m-%d %H:%M:%S' ) self.__data__["SubmitTime"] = value @property def LastUpdate( self ): """ last update getter """ return self.__data__["LastUpdate"] @LastUpdate.setter def LastUpdate( self, value = None ): """ last update setter """ if type( value ) not in ( datetime.datetime, str ): raise TypeError( "LastUpdate should be a datetime.datetime!" ) if type( value ) == str: value = datetime.datetime.strptime( value.split( "." )[0], '%Y-%m-%d %H:%M:%S' ) self.__data__["LastUpdate"] = value @property def TargetSE( self ): """ target SE getter """ return self.__data__["TargetSE"] @TargetSE.setter def TargetSE( self, targetSE ): """ target SE setter """ self.__data__["TargetSE"] = targetSE @property def SourceSE( self ): """ source SE getter """ return self.__data__["SourceSE"] @SourceSE.setter def SourceSE( self, sourceSE ): """ source SE setter """ self.__data__["SourceSE"] = sourceSE @property def SourceToken( self ): """ source token getter """ return self.__data__["SourceToken"] @SourceToken.setter def SourceToken( self, sourceToken ): """ source SE setter """ self.__data__["SourceToken"] = sourceToken @property def TargetToken( self ): """ target token getter """ return self.__data__["TargetToken"] @TargetToken.setter def TargetToken( self, targetToken ): """ target SE setter """ self.__data__["TargetToken"] = targetToken # # FTSJobFiles arithmetics def __contains__( self, subFile ): """ in operator """ return subFile in self.__files__ def __iadd__( self, ftsFile ): """ += operator """ if ftsFile not in self: self.__files__.append( ftsFile ) ftsFile._parent = self self.Files self.Size return self def __add__( self, ftsFile ): """ + operator """ self +=ftsFile def addFile( self, ftsFile ): """ add :ftsFile: to FTS job """ self +=ftsFile def subFile( self, ftsFile ): """ remove ftsFile from this job """ if ftsFile in self: ftsFile._parent = None self.__files__.remove( ftsFile ) # 
# helpers for looping def __iter__( self ): """ files iterator """ return self.__files__.__iter__() def __getitem__( self, i ): """ [] op for files """ return self.__files__.__getitem__( i ) def __delitem__( self, i ): """ del ftsJob[i] """ self.__files__.__delitem__( i ) def __setitem__( self, i, ftsFile ): """ ftsJob[i] = ftsFile """ self.__files__.__setitem__( i, ftsFile ) def fileStatusList( self ): """ get list of files statuses """ return [ ftsFile.Status for ftsFile in self ] def __nonzero__( self ): """ for comparisons """ return True def __len__( self ): """ nb of subFiles """ return len( self.__files__ ) def _surlPairs( self ): """ create and return SURL pair file """ surls = [] for ftsFile in self: checksum = "%s:%s" % ( ftsFile.ChecksumType, ftsFile.Checksum ) if ftsFile.ChecksumType and ftsFile.Checksum else "" surls.append( "%s %s %s" % ( ftsFile.SourceSURL, ftsFile.TargetSURL, checksum ) ) return "\n".join( surls ) def submitFTS2( self, command = 'glite-transfer-submit', pinTime = False ): """ submit fts job using FTS2 client """ if self.FTSGUID: return S_ERROR( "FTSJob has already been submitted" ) surls = self._surlPairs() if not surls: return S_ERROR( "No files to submit" ) fd, fileName = tempfile.mkstemp() surlFile = os.fdopen( fd, 'w' ) surlFile.write( surls ) surlFile.close() submitCommand = command.split() + \ [ "-s", self.FTSServer, "-f", fileName, "-o", "-K" ] if self.TargetToken: submitCommand += [ "-t", self.TargetToken] if self.SourceToken: submitCommand += [ "-S", self.SourceToken ] if pinTime: submitCommand += [ "--copy-pin-lifetime", "%d" % pinTime, "--bring-online", '86400' ] submit = executeGridCommand( "", submitCommand ) os.remove( fileName ) if not submit["OK"]: return submit returnCode, output, errStr = submit["Value"] if returnCode != 0: return S_ERROR( errStr if errStr else output ) self.FTSGUID = output.replace( "\n", "" ) self.Status = "Submitted" for ftsFile in self: ftsFile.FTSGUID = self.FTSGUID ftsFile.Status = 
"Submitted" return S_OK() def _normalizedStatus( self, status ): for st in self._states: if status.lower() == st.lower(): return st return status def monitorFTS2( self, command = "glite-transfer-status", full = False ): """ monitor fts job """ if not self.FTSGUID: return S_ERROR( "FTSGUID not set, FTS job not submitted?" ) monitorCommand = command.split() + \ ["--verbose", "-s", self.FTSServer, self.FTSGUID ] if full: monitorCommand.append( "-l" ) monitor = executeGridCommand( "", monitorCommand ) if not monitor["OK"]: return monitor returnCode, outputStr, errStr = monitor["Value"] # Returns a non zero status if error if returnCode != 0: if 'was not found' in outputStr and not errStr: errStr = 'Job was not found' return S_ERROR( errStr ) outputStr = outputStr.replace( "'" , "" ).replace( "<", "" ).replace( ">", "" ) # # set FTS job status regExp = re.compile( "Status:\\s+(\\S+)" ) # with FTS3 this can be uppercase self.Status = re.search( regExp, outputStr ).group( 1 ) statusSummary = {} # This is capitalized, even in FTS3! for state in FTSFile.ALL_STATES: regExp = re.compile( "\\s+%s:\\s+(\\d+)" % state ) if regExp.search( outputStr ): statusSummary[state] = int( re.search( regExp, outputStr ).group( 1 ) ) total = sum( statusSummary.values() ) completed = sum( [ statusSummary.get( state, 0 ) for state in FTSFile.FINAL_STATES ] ) self.Completeness = 100 * completed / total if total else 0 if not full: return S_OK( statusSummary ) # The order of informations is not the same for glite- and fts- !!! 
# In order: new fts-, old fts-, glite- realJob = len( self ) != 0 iExptr = None for iExptr, exptr in enumerate( ( '[ ]+Source:[ ]+(\\S+)\n[ ]+Destination:[ ]+(\\S+)\n[ ]+State:[ ]+(\\S+)\n[ ]+Reason:[ ]+([\\S ]+).+?[ ]+Duration:[ ]+(\\d+)\n[ ]+Staging:[ ]+(\\d+)\n[ ]+Retries:[ ]+(\\d+)', '[ ]+Source:[ ]+(\\S+)\n[ ]+Destination:[ ]+(\\S+)\n[ ]+State:[ ]+(\\S+)\n[ ]+Reason:[ ]+([\\S ]+).+?[ ]+Duration:[ ]+(\\d+)\n[ ]+Retries:[ ]+(\\d+)', '[ ]+Source:[ ]+(\\S+)\n[ ]+Destination:[ ]+(\\S+)\n[ ]+State:[ ]+(\\S+)\n[ ]+Retries:[ ]+(\\d+)\n[ ]+Reason:[ ]+([\\S ]+).+?[ ]+Duration:[ ]+(\\d+)' ) ): regExp = re.compile( exptr, re.S ) fileInfo = re.findall( regExp, outputStr ) if fileInfo: break if not fileInfo: return S_ERROR( "Error monitoring job (no regexp match)" ) for info in fileInfo: if iExptr == 0: # version >= 3.2.30 sourceURL, targetURL, fileStatus, reason, duration, _retries, _staging = info elif iExptr == 1: # version FTS3 < 3.2.30 sourceURL, targetURL, fileStatus, reason, duration, _retries = info elif iExptr == 2: # version FTS2 sourceURL, targetURL, fileStatus, _retries, reason, duration = info else: return S_ERROR( 'Error monitoring job (implement match %d)' % iExptr ) candidateFile = None if not realJob: # This is used by the CLI monitoring of jobs in case no file was specified candidateFile = FTSFile() candidateFile.LFN = overlap( sourceURL, targetURL ) candidateFile.SourceSURL = sourceURL candidateFile.Size = 0 self +=candidateFile else: for ftsFile in self: if ftsFile.SourceSURL == sourceURL: candidateFile = ftsFile break if not candidateFile: continue # Can be uppercase for FTS3 if not candidateFile.TargetSURL: candidateFile.TargetSURL = targetURL candidateFile.Status = fileStatus candidateFile.Error = reason candidateFile._duration = duration if candidateFile.Status == "Failed": for missingSource in self.missingSourceErrors: if missingSource.match( reason ): candidateFile.Error = "MissingSource" # If the staging info was present, record it if len( info ) 
> 6: candidateFile._staging = info[6] # # register successful files if self.Status in FTSJob.FINALSTATES: return self.finalize() return S_OK() def submitFTS3( self, pinTime = False ): """ submit fts job using FTS3 rest API """ if self.FTSGUID: return S_ERROR( "FTSJob already has been submitted" ) transfers = [] for ftsFile in self: trans = fts3.new_transfer( ftsFile.SourceSURL, ftsFile.TargetSURL, checksum = ftsFile.Checksum, filesize = ftsFile.Size ) transfers.append( trans ) source_spacetoken = self.SourceToken if self.SourceToken else None dest_spacetoken = self.TargetToken if self.TargetToken else None copy_pin_lifetime = pinTime if pinTime else None bring_online = 86400 if pinTime else None job = fts3.new_job( transfers = transfers, overwrite = True, source_spacetoken = source_spacetoken, spacetoken = dest_spacetoken, bring_online = bring_online, copy_pin_lifetime = copy_pin_lifetime, retry = 3 ) try: context = fts3.Context( self.FTSServer ) self.FTSGUID = fts3.submit( context, job ) except Exception, e: return S_ERROR( "Error at submission: %s" % e ) self.Status = "Submitted" self._log = gLogger.getSubLogger( "req_%s/FTSJob-%s" % ( self.RequestID, self.FTSGUID ) , True ) for ftsFile in self: ftsFile.FTSGUID = self.FTSGUID ftsFile.Status = "Submitted" return S_OK()
class FTSJob( object ): """ Class describing one FTS job """ # # initial states INITSTATES = ( "Submitted", "Ready", "Staging" ) # # ongoing transfer states TRANSSTATES = ( "Active", "Hold" ) # # failed states FAILEDSTATES = ( "Canceled", "Failed" ) # # finished (careful, must be capitalized) FINALSTATES = ( "Finished", "Finisheddirty", "FinishedDirty", "Failed", "Canceled" ) # # missing source regexp patterns missingSourceErrors = [ re.compile( r".*INVALID_PATH\] Failed" ), re.compile( r".*INVALID_PATH\] No such file or directory" ), re.compile( r".*INVALID_PATH\] The requested file either does not exist" ), re.compile( r".*INVALID_PATH\] the server sent an error response: 500 500"\ " Command failed. : open error: No such file or directory" ), re.compile( r"SOURCE error during TRANSFER_PREPARATION phase: \[USER_ERROR\] source file doesnt exist" ) ] def __init__( self, fromDict = None ): """c'tor :param self: self reference :param dict fromDict: data dict """ self.__data__ = dict.fromkeys( self.tableDesc()["Fields"].keys(), None ) now = datetime.datetime.utcnow().replace( microsecond = 0 ) self.__data__["CreationTime"] = now self.__data__["SubmitTime"] = now self.__data__["LastUpdate"] = now self.__data__["Status"] = "Submitted" self.__data__["Completeness"] = 0 self.__data__["FTSJobID"] = 0 self._regTime = 0. 
self._regSuccess = 0 self._regTotal = 0 self.__files__ = TypedList( allowedTypes = FTSFile ) self._fc = FileCatalog() self._fts3context = None self._states = tuple( set( self.INITSTATES + self.TRANSSTATES + self.FAILEDSTATES + self.FINALSTATES ) ) fromDict = fromDict if fromDict else {} for ftsFileDict in fromDict.get( "FTSFiles", [] ): self +=FTSFile( ftsFileDict ) if "FTSFiles" in fromDict: del fromDict["FTSFiles"] for key, value in fromDict.items(): if key not in self.__data__: raise AttributeError( "Unknown FTSJob attribute '%s'" % key ) if value: setattr( self, key, value ) self._log = gLogger.getSubLogger( "req_%s/FTSJob-%s" % ( self.RequestID, self.FTSGUID ) , True ) @staticmethod def tableDesc(): """ get table desc """ return { "Fields" : { "FTSJobID" : "INTEGER NOT NULL AUTO_INCREMENT", "FTSGUID" : "VARCHAR(64) NOT NULL", "OperationID": "INTEGER NOT NULL", "RequestID": "INTEGER NOT NULL", "SourceSE" : "VARCHAR(128) NOT NULL", "TargetSE" : "VARCHAR(128) NOT NULL", "FTSServer" : "VARCHAR(255) NOT NULL", "TargetToken": "VARCHAR(255)", "SourceToken": "VARCHAR(255)", "Size": "BIGINT NOT NULL", "Files": "INTEGER NOT NULL", "Completeness": "INTEGER NOT NULL DEFAULT 0", "FailedFiles": "INTEGER DEFAULT 0", "FailedSize": "INTEGER DEFAULT 0", "Status" : "ENUM( 'Submitted', 'Ready', 'Staging', 'Canceled', 'Active', 'Hold', "\ "'Failed', 'Finished', 'FinishedDirty', 'Assigned' ) DEFAULT 'Submitted'", "Error" : "VARCHAR(255)", "CreationTime" : "DATETIME", "SubmitTime" : "DATETIME", "LastUpdate" : "DATETIME" }, "PrimaryKey" : [ "FTSJobID" ], "Indexes" : { "FTSJobID" : [ "FTSJobID" ], "FTSGUID": [ "FTSGUID" ] } } @property def FTSJobID( self ): """ FTSJobID getter """ return self.__data__["FTSJobID"] @FTSJobID.setter def FTSJobID( self, value ): """ FTSJobID setter """ self.__data__["FTSJobID"] = long( value ) if value else 0 @property def RequestID( self ): """ RequestID getter """ return self.__data__["RequestID"] @RequestID.setter def RequestID( self, value ): """ 
RequestID setter """ self.__data__["RequestID"] = long( value ) if value else 0 @property def OperationID( self ): """ OperationID getter """ return self.__data__["OperationID"] @OperationID.setter def OperationID( self, value ): """ OperationID setter """ self.__data__["OperationID"] = long( value ) if value else 0 @property def FTSGUID( self ): """ FTSGUID prop """ return self.__data__["FTSGUID"] @FTSGUID.setter def FTSGUID( self, value ): """ FTSGUID setter """ if value: if type( value ) not in ( str, unicode ): raise TypeError( "FTSGUID should be a string!" ) if not checkGuid( value ): raise ValueError( "'%s' is not a valid GUID!" % str( value ) ) self.__data__["FTSGUID"] = value @property def FTSServer( self ): """ FTSServer getter """ return self.__data__["FTSServer"] @FTSServer.setter def FTSServer( self, url ): """ FTSServer getter """ self.__data__["FTSServer"] = url # I REALLY don't see that happening # but in case we change the server after the # context was created, I reset it # (I don't initialize because maybe we are in FTS2 mode...) 
self._fts3context = None @property def Completeness( self ): """ completeness getter """ return self.__data__["Completeness"] @Completeness.setter def Completeness( self, value ): """ completeness setter """ self.__data__["Completeness"] = int( value ) if value else 0 @property def Error( self ): """ error getter """ return self.__data__["Error"] @Error.setter def Error( self, error ): """ error setter """ self.__data__["Error"] = str( error )[255:] @property def Files( self ): """ nb files getter """ self.__data__["Files"] = len( self ) return self.__data__["Files"] @Files.setter def Files( self, value ): """ nb files setter """ self.__data__["Files"] = len( self ) @property def Status( self ): """ status prop """ if not self.__data__["Status"]: self.__data__["Status"] = "Waiting" return self.__data__["Status"] @Status.setter def Status( self, value ): """ status setter """ value = self._normalizedStatus( value.strip() ) if value not in self._states: raise ValueError( "Unknown FTSJob Status: '%s'" % str( value ) ) self.__data__["Status"] = value @property def FailedFiles( self ): """ nb failed files getter """ self.__data__["FailedFiles"] = len( [ ftsFile for ftsFile in self if ftsFile.Status in FTSFile.FAILED_STATES ] ) return self.__data__["FailedFiles"] @FailedFiles.setter def FailedFiles( self, value ): """ nb failed files setter """ if value: self.__data__["FailedFiles"] = value else: self.__data__["FailedFiles"] = len( [ftsFile for ftsFile in self if ftsFile.Status in FTSFile.FAILED_STATES] ) @property def Size( self ): """ size getter """ # if not self.__data__["Size"]: self.__data__["Size"] = sum( ftsFile.Size for ftsFile in self ) return self.__data__["Size"] @Size.setter def Size( self, value ): """ size setter """ if value: self.__data__["Size"] = value else: self.__data__["Size"] = sum( ftsFile.Size for ftsFile in self ) @property def FailedSize( self ): """ size getter """ if not self.__data__["FailedSize"]: self.__data__["FailedSize"] = sum( 
ftsFile.Size for ftsFile in self if ftsFile.Status in FTSFile.FAILED_STATES ) return self.__data__["FailedSize"] @FailedSize.setter def FailedSize( self, value ): """ size setter """ if value: self.__data__["FailedSize"] = value else: self.__data__["FailedSize"] = sum( ftsFile.Size for ftsFile in self if ftsFile.Status in FTSFile.FAILED_STATES ) @property def CreationTime( self ): """ creation time getter """ return self.__data__["CreationTime"] @CreationTime.setter def CreationTime( self, value = None ): """ creation time setter """ if type( value ) not in ( datetime.datetime, str ) : raise TypeError( "CreationTime should be a datetime.datetime!" ) if type( value ) == str: value = datetime.datetime.strptime( value.split( "." )[0], '%Y-%m-%d %H:%M:%S' ) self.__data__["CreationTime"] = value @property def SubmitTime( self ): """ request's submission time getter """ return self.__data__["SubmitTime"] @SubmitTime.setter def SubmitTime( self, value = None ): """ submission time setter """ if type( value ) not in ( datetime.datetime, str ): raise TypeError( "SubmitTime should be a datetime.datetime!" ) if type( value ) == str: value = datetime.datetime.strptime( value.split( "." )[0], '%Y-%m-%d %H:%M:%S' ) self.__data__["SubmitTime"] = value @property def LastUpdate( self ): """ last update getter """ return self.__data__["LastUpdate"] @LastUpdate.setter def LastUpdate( self, value = None ): """ last update setter """ if type( value ) not in ( datetime.datetime, str ): raise TypeError( "LastUpdate should be a datetime.datetime!" ) if type( value ) == str: value = datetime.datetime.strptime( value.split( "." 
)[0], '%Y-%m-%d %H:%M:%S' ) self.__data__["LastUpdate"] = value @property def TargetSE( self ): """ target SE getter """ return self.__data__["TargetSE"] @TargetSE.setter def TargetSE( self, targetSE ): """ target SE setter """ self.__data__["TargetSE"] = targetSE @property def SourceSE( self ): """ source SE getter """ return self.__data__["SourceSE"] @SourceSE.setter def SourceSE( self, sourceSE ): """ source SE setter """ self.__data__["SourceSE"] = sourceSE @property def SourceToken( self ): """ source token getter """ return self.__data__["SourceToken"] @SourceToken.setter def SourceToken( self, sourceToken ): """ source SE setter """ self.__data__["SourceToken"] = sourceToken @property def TargetToken( self ): """ target token getter """ return self.__data__["TargetToken"] @TargetToken.setter def TargetToken( self, targetToken ): """ target SE setter """ self.__data__["TargetToken"] = targetToken # # FTSJobFiles arithmetics def __contains__( self, subFile ): """ in operator """ return subFile in self.__files__ def __iadd__( self, ftsFile ): """ += operator """ if ftsFile not in self: self.__files__.append( ftsFile ) ftsFile._parent = self self.Files self.Size return self def __add__( self, ftsFile ): """ + operator """ self +=ftsFile def addFile( self, ftsFile ): """ add :ftsFile: to FTS job """ self +=ftsFile def subFile( self, ftsFile ): """ remove ftsFile from this job """ if ftsFile in self: ftsFile._parent = None self.__files__.remove( ftsFile ) # # helpers for looping def __iter__( self ): """ files iterator """ return self.__files__.__iter__() def __getitem__( self, i ): """ [] op for files """ return self.__files__.__getitem__( i ) def __delitem__( self, i ): """ del ftsJob[i] """ self.__files__.__delitem__( i ) def __setitem__( self, i, ftsFile ): """ ftsJob[i] = ftsFile """ self.__files__.__setitem__( i, ftsFile ) def fileStatusList( self ): """ get list of files statuses """ return [ ftsFile.Status for ftsFile in self ] def __nonzero__( self ): """ 
for comparisons """ return True def __len__( self ): """ nb of subFiles """ return len( self.__files__ ) def _surlPairs( self ): """ create and return SURL pair file """ surls = [] for ftsFile in self: checksum = "%s:%s" % ( ftsFile.ChecksumType, ftsFile.Checksum ) if ftsFile.ChecksumType and ftsFile.Checksum else "" surls.append( "%s %s %s" % ( ftsFile.SourceSURL, ftsFile.TargetSURL, checksum ) ) return "\n".join( surls ) def submitFTS2( self, command = 'glite-transfer-submit', pinTime = False ): """ submit fts job using FTS2 client """ if self.FTSGUID: return S_ERROR( "FTSJob has already been submitted" ) surls = self._surlPairs() if not surls: return S_ERROR( "No files to submit" ) fd, fileName = tempfile.mkstemp() surlFile = os.fdopen( fd, 'w' ) surlFile.write( surls ) surlFile.close() submitCommand = command.split() + \ [ "-s", self.FTSServer, "-f", fileName, "-o", "-K" ] if self.TargetToken: submitCommand += [ "-t", self.TargetToken] if self.SourceToken: submitCommand += [ "-S", self.SourceToken ] if pinTime: submitCommand += [ "--copy-pin-lifetime", "%d" % pinTime, "--bring-online", '86400' ] submit = executeGridCommand( "", submitCommand ) os.remove( fileName ) if not submit["OK"]: return submit returnCode, output, errStr = submit["Value"] if returnCode != 0: return S_ERROR( errStr if errStr else output ) self.FTSGUID = output.replace( "\n", "" ) self.Status = "Submitted" for ftsFile in self: ftsFile.FTSGUID = self.FTSGUID ftsFile.Status = "Submitted" return S_OK() def _normalizedStatus( self, status ): for st in self._states: if status.lower() == st.lower(): return st return status def monitorFTS2( self, command = "glite-transfer-status", full = False ): """ monitor fts job """ if not self.FTSGUID: return S_ERROR( "FTSGUID not set, FTS job not submitted?" 
) monitorCommand = command.split() + \ ["--verbose", "-s", self.FTSServer, self.FTSGUID ] if full: monitorCommand.append( "-l" ) monitor = executeGridCommand( "", monitorCommand ) if not monitor["OK"]: return monitor returnCode, outputStr, errStr = monitor["Value"] # Returns a non zero status if error if returnCode != 0: if 'was not found' in outputStr and not errStr: errStr = 'Job was not found' return S_ERROR( errStr ) outputStr = outputStr.replace( "'" , "" ).replace( "<", "" ).replace( ">", "" ) # # set FTS job status regExp = re.compile( "Status:\\s+(\\S+)" ) # with FTS3 this can be uppercase self.Status = re.search( regExp, outputStr ).group( 1 ) statusSummary = {} # This is capitalized, even in FTS3! for state in FTSFile.ALL_STATES: regExp = re.compile( "\\s+%s:\\s+(\\d+)" % state ) if regExp.search( outputStr ): statusSummary[state] = int( re.search( regExp, outputStr ).group( 1 ) ) total = sum( statusSummary.values() ) completed = sum( statusSummary.get( state, 0 ) for state in FTSFile.FINAL_STATES ) self.Completeness = 100 * completed / total if total else 0 if not full: return S_OK( statusSummary ) # The order of informations is not the same for glite- and fts- !!! 
# In order: new fts-, old fts-, glite- realJob = len( self ) != 0 iExptr = None for iExptr, exptr in enumerate( ( '[ ]+Source:[ ]+(\\S+)\n[ ]+Destination:[ ]+(\\S+)\n[ ]+State:[ ]+(\\S+)\n[ ]+Reason:[ ]+([\\S ]+).+?[ ]+Duration:[ ]+(\\d+)\n[ ]+Staging:[ ]+(\\d+)\n[ ]+Retries:[ ]+(\\d+)', '[ ]+Source:[ ]+(\\S+)\n[ ]+Destination:[ ]+(\\S+)\n[ ]+State:[ ]+(\\S+)\n[ ]+Reason:[ ]+([\\S ]+).+?[ ]+Duration:[ ]+(\\d+)\n[ ]+Retries:[ ]+(\\d+)', '[ ]+Source:[ ]+(\\S+)\n[ ]+Destination:[ ]+(\\S+)\n[ ]+State:[ ]+(\\S+)\n[ ]+Retries:[ ]+(\\d+)\n[ ]+Reason:[ ]+([\\S ]+).+?[ ]+Duration:[ ]+(\\d+)' ) ): regExp = re.compile( exptr, re.S ) fileInfo = re.findall( regExp, outputStr ) if fileInfo: break if not fileInfo: return S_ERROR( "Error monitoring job (no regexp match)" ) for info in fileInfo: if iExptr == 0: # version >= 3.2.30 sourceURL, targetURL, fileStatus, reason, duration, _retries, _staging = info elif iExptr == 1: # version FTS3 < 3.2.30 sourceURL, targetURL, fileStatus, reason, duration, _retries = info elif iExptr == 2: # version FTS2 sourceURL, targetURL, fileStatus, _retries, reason, duration = info else: return S_ERROR( 'Error monitoring job (implement match %d)' % iExptr ) candidateFile = None if not realJob: # This is used by the CLI monitoring of jobs in case no file was specified candidateFile = FTSFile() candidateFile.LFN = overlap( sourceURL, targetURL ) candidateFile.SourceSURL = sourceURL candidateFile.Size = 0 self +=candidateFile else: for ftsFile in self: if ftsFile.SourceSURL == sourceURL: candidateFile = ftsFile break if not candidateFile: continue # Can be uppercase for FTS3 if not candidateFile.TargetSURL: candidateFile.TargetSURL = targetURL candidateFile.Status = fileStatus candidateFile.Error = reason candidateFile._duration = duration if candidateFile.Status == "Failed": for missingSource in self.missingSourceErrors: if missingSource.match( reason ): candidateFile.Error = "MissingSource" # If the staging info was present, record it if len( info ) 
> 6: candidateFile._staging = info[6] # # register successful files if self.Status in FTSJob.FINALSTATES: return self.finalize() return S_OK() def submitFTS3( self, pinTime = False ): """ submit fts job using FTS3 rest API """ if self.FTSGUID: return S_ERROR( "FTSJob already has been submitted" ) transfers = [] for ftsFile in self: trans = fts3.new_transfer( ftsFile.SourceSURL, ftsFile.TargetSURL, checksum = 'ADLER32:%s'%ftsFile.Checksum, filesize = ftsFile.Size ) transfers.append( trans ) source_spacetoken = self.SourceToken if self.SourceToken else None dest_spacetoken = self.TargetToken if self.TargetToken else None copy_pin_lifetime = pinTime if pinTime else None bring_online = 86400 if pinTime else None job = fts3.new_job( transfers = transfers, overwrite = True, source_spacetoken = source_spacetoken, spacetoken = dest_spacetoken, bring_online = bring_online, copy_pin_lifetime = copy_pin_lifetime, retry = 3 ) try: if not self._fts3context: self._fts3context = fts3.Context( endpoint = self.FTSServer, request_class = ftsSSLRequest, verify = False ) context = self._fts3context self.FTSGUID = fts3.submit( context, job ) except Exception as e: return S_ERROR( "Error at submission: %s" % e ) self.Status = "Submitted" self._log = gLogger.getSubLogger( "req_%s/FTSJob-%s" % ( self.RequestID, self.FTSGUID ) , True ) for ftsFile in self: ftsFile.FTSGUID = self.FTSGUID ftsFile.Status = "Submitted" return S_OK() def monitorFTS3( self, full = False ): if not self.FTSGUID: return S_ERROR( "FTSGUID not set, FTS job not submitted?" 
) jobStatusDict = None try: if not self._fts3context: self._fts3context = fts3.Context( endpoint = self.FTSServer, request_class = ftsSSLRequest, verify = False ) context = self._fts3context jobStatusDict = fts3.get_job_status( context, self.FTSGUID, list_files = True ) except Exception as e: return S_ERROR( "Error getting the job status %s" % e ) self.Status = jobStatusDict['job_state'].capitalize() filesInfoList = jobStatusDict['files'] statusSummary = {} for fileDict in filesInfoList: file_state = fileDict['file_state'].capitalize() statusSummary[file_state] = statusSummary.get( file_state, 0 ) + 1 total = len( filesInfoList ) completed = sum( [ statusSummary.get( state, 0 ) for state in FTSFile.FINAL_STATES ] ) self.Completeness = 100 * completed / total if not full: return S_OK( statusSummary ) ftsFilesPrinted = False for fileDict in filesInfoList: sourceURL = fileDict['source_surl'] targetURL = fileDict['dest_surl'] fileStatus = fileDict['file_state'].capitalize() reason = fileDict['reason'] duration = fileDict['tx_duration'] candidateFile = None for ftsFile in self: if ftsFile.SourceSURL == sourceURL and ftsFile.TargetSURL == targetURL : candidateFile = ftsFile break if candidateFile is None: self._log.warn( 'FTSFile not found', 'Source: %s, Target: %s' % ( sourceURL, targetURL ) ) if not ftsFilesPrinted: ftsFilesPrinted = True if not len( self ): self._log.warn( 'Monitored FTS job is empty!' 
) else: self._log.warn( 'All FTS files are:', '\n' + '\n'.join( ['Source: %s, Target: %s' % ( ftsFile.SourceSURL, ftsFile.TargetSURL ) for ftsFile in self] ) ) else: candidateFile.Status = fileStatus candidateFile.Error = reason candidateFile._duration = duration if candidateFile.Status == "Failed": for missingSource in self.missingSourceErrors: if missingSource.match( reason ): candidateFile.Error = "MissingSource" # # register successful files if self.Status in FTSJob.FINALSTATES: return self.finalize() return S_OK() def monitorFTS( self, ftsVersion, command = "glite-transfer-status", full = False ): """ Wrapper calling the proper method for a given version of FTS""" if ftsVersion == "FTS2": return self.monitorFTS2( command = command, full = full ) elif ftsVersion == "FTS3": return self.monitorFTS3( full = full ) else: return S_ERROR( "monitorFTS: unknown FTS version %s" % ftsVersion ) def submitFTS( self, ftsVersion, command = 'glite-transfer-submit', pinTime = False ): """ Wrapper calling the proper method for a given version of FTS""" if ftsVersion == "FTS2": return self.submitFTS2( command = command, pinTime = pinTime ) elif ftsVersion == "FTS3": return self.submitFTS3( pinTime = pinTime ) else: return S_ERROR( "submitFTS: unknown FTS version %s" % ftsVersion ) def finalize( self ): """ register successfully transferred files """ if self.Status not in FTSJob.FINALSTATES: return S_OK() if not len( self ): return S_ERROR( "Empty job in finalize" ) startTime = time.time() targetSE = StorageElement( self.TargetSE ) toRegister = [ ftsFile for ftsFile in self if ftsFile.Status == "Finished" ] toRegisterDict = {} for ftsFile in toRegister: pfn = returnSingleResult( targetSE.getURL( ftsFile.LFN, protocol = 'srm' ) ) if pfn["OK"]: pfn = pfn["Value"] toRegisterDict[ ftsFile.LFN ] = { "PFN": pfn, "SE": self.TargetSE } else: self._log.error( "Error getting SRM URL", pfn['Message'] ) if toRegisterDict: self._regTotal += len( toRegisterDict ) register = 
self._fc.addReplica( toRegisterDict ) self._regTime += time.time() - startTime if not register["OK"]: self._log.error( 'Error registering replica', register['Message'] ) for ftsFile in toRegister: ftsFile.Error = "AddCatalogReplicaFailed" return register register = register["Value"] self._regSuccess += len( register.get( 'Successful', {} ) ) if self._regSuccess: self._log.info( 'Successfully registered %d replicas' % self._regSuccess ) failedFiles = register.get( "Failed", {} ) errorReason = {} for lfn, reason in failedFiles.items(): errorReason.setdefault( str( reason ), [] ).append( lfn ) for reason in errorReason: self._log.error( 'Error registering %d replicas' % len( errorReason[reason] ), reason ) for ftsFile in toRegister: if ftsFile.LFN in failedFiles: ftsFile.Error = "AddCatalogReplicaFailed" else: statuses = set( [ftsFile.Status for ftsFile in self] ) self._log.warn( "No replicas to register for FTSJob (%s) - Files status: '%s'" % \ ( self.Status, ','.join( sorted( statuses ) ) ) ) return S_OK() def toSQL( self ): """ prepare SQL INSERT or UPDATE statement :return: str with SQL fragment """ colVals = [] for column, value in self.__data__.items(): if value and column not in ( "FTSJobID", "LastUpdate" ): colStr = "`%s`" % column if isinstance( value, datetime.datetime ) or isinstance( value, basestring ): valStr = "'%s'" % value else: valStr = str( value ) colVals.append( ( colStr, valStr ) ) colVals.append( ( "`LastUpdate`", "UTC_TIMESTAMP()" ) ) query = [] if self.FTSJobID: query.append( "UPDATE `FTSJob` SET " ) query.append( ",".join( [ "%s=%s" % item for item in colVals ] ) ) query.append( " WHERE `FTSJobID`=%d;\n" % self.FTSJobID ) else: query.append( "INSERT INTO `FTSJob` " ) columns = "(%s)" % ",".join( [ column for column, value in colVals ] ) values = "(%s)" % ",".join( [ value for column, value in colVals ] ) query.append( columns ) query.append( " VALUES %s;" % values ) return S_OK( "".join( query ) ) def toJSON( self ): """ dump to JSON format 
""" digest = dict( zip( self.__data__.keys(), [ str( val ) if val else "" for val in self.__data__.values() ] ) ) digest["FTSFiles"] = [] for ftsFile in self: fileJSON = ftsFile.toJSON() if not fileJSON["OK"]: return fileJSON digest["FTSFiles"].append( fileJSON["Value"] ) return S_OK( digest )
class FTSJob(object): """ class describing one FTS job """ # # initial states INITSTATES = ("Submitted", "Ready", "Staging") # # ongoing transfer states TRANSSTATES = ("Active", "Hold") # # failed states FAILEDSTATES = ("Canceled", "Failed") # # finished (careful, must be capitalized) FINALSTATES = ("Finished", "Finisheddirty", "FinishedDirty", "Failed", "Canceled") # # missing source regexp patterns missingSourceErrors = [ re.compile( r".*INVALID_PATH\] Failed" ), re.compile( r".*INVALID_PATH\] No such file or directory" ), re.compile( r".*INVALID_PATH\] The requested file either does not exist" ), re.compile( r".*INVALID_PATH\] the server sent an error response: 500 500"\ " Command failed. : open error: No such file or directory" ), re.compile( r"SOURCE error during TRANSFER_PREPARATION phase: \[USER_ERROR\] source file doesnt exist" ) ] def __init__(self, fromDict=None): """c'tor :param self: self reference :param dict fromDict: data dict """ self.__data__ = dict.fromkeys(self.tableDesc()["Fields"].keys(), None) now = datetime.datetime.utcnow().replace(microsecond=0) self.__data__["CreationTime"] = now self.__data__["SubmitTime"] = now self.__data__["LastUpdate"] = now self.__data__["Status"] = "Submitted" self.__data__["Completeness"] = 0 self.__data__["FTSJobID"] = 0 self._regTime = 0. 
self._regSuccess = 0 self._regTotal = 0 self.__files__ = TypedList(allowedTypes=FTSFile) self._fc = FileCatalog() self._states = tuple( set(self.INITSTATES + self.TRANSSTATES + self.FAILEDSTATES + self.FINALSTATES)) fromDict = fromDict if fromDict else {} for ftsFileDict in fromDict.get("FTSFiles", []): self += FTSFile(ftsFileDict) if "FTSFiles" in fromDict: del fromDict["FTSFiles"] for key, value in fromDict.items(): if key not in self.__data__: raise AttributeError("Unknown FTSJob attribute '%s'" % key) if value: setattr(self, key, value) self._log = gLogger.getSubLogger( "req_%s/FTSJob-%s" % (self.RequestID, self.FTSGUID), True) @staticmethod def tableDesc(): """ get table desc """ return { "Fields" : { "FTSJobID" : "INTEGER NOT NULL AUTO_INCREMENT", "FTSGUID" : "VARCHAR(64) NOT NULL", "OperationID": "INTEGER NOT NULL", "RequestID": "INTEGER NOT NULL", "SourceSE" : "VARCHAR(128) NOT NULL", "TargetSE" : "VARCHAR(128) NOT NULL", "FTSServer" : "VARCHAR(255) NOT NULL", "TargetToken": "VARCHAR(255)", "SourceToken": "VARCHAR(255)", "Size": "BIGINT NOT NULL", "Files": "INTEGER NOT NULL", "Completeness": "INTEGER NOT NULL DEFAULT 0", "FailedFiles": "INTEGER DEFAULT 0", "FailedSize": "INTEGER DEFAULT 0", "Status" : "ENUM( 'Submitted', 'Ready', 'Staging', 'Canceled', 'Active', 'Hold', "\ "'Failed', 'Finished', 'FinishedDirty', 'Assigned' ) DEFAULT 'Submitted'", "Error" : "VARCHAR(255)", "CreationTime" : "DATETIME", "SubmitTime" : "DATETIME", "LastUpdate" : "DATETIME" }, "PrimaryKey" : [ "FTSJobID" ], "Indexes" : { "FTSJobID" : [ "FTSJobID" ], "FTSGUID": [ "FTSGUID" ] } } @property def FTSJobID(self): """ FTSJobID getter """ return self.__data__["FTSJobID"] @FTSJobID.setter def FTSJobID(self, value): """ FTSJobID setter """ self.__data__["FTSJobID"] = long(value) if value else 0 @property def RequestID(self): """ RequestID getter """ return self.__data__["RequestID"] @RequestID.setter def RequestID(self, value): """ RequestID setter """ self.__data__["RequestID"] = 
long(value) if value else 0 @property def OperationID(self): """ OperationID getter """ return self.__data__["OperationID"] @OperationID.setter def OperationID(self, value): """ OperationID setter """ self.__data__["OperationID"] = long(value) if value else 0 @property def FTSGUID(self): """ FTSGUID prop """ return self.__data__["FTSGUID"] @FTSGUID.setter def FTSGUID(self, value): """ FTSGUID setter """ if value: if type(value) not in (str, unicode): raise TypeError("FTSGUID should be a string!") if not checkGuid(value): raise ValueError("'%s' is not a valid GUID!" % str(value)) self.__data__["FTSGUID"] = value @property def FTSServer(self): """ FTSServer getter """ return self.__data__["FTSServer"] @FTSServer.setter def FTSServer(self, url): """ FTSServer getter """ self.__data__["FTSServer"] = url @property def Completeness(self): """ completeness getter """ return self.__data__["Completeness"] @Completeness.setter def Completeness(self, value): """ completeness setter """ self.__data__["Completeness"] = int(value) if value else 0 @property def Error(self): """ error getter """ return self.__data__["Error"] @Error.setter def Error(self, error): """ error setter """ self.__data__["Error"] = str(error)[255:] @property def Files(self): """ nb files getter """ self.__data__["Files"] = len(self) return self.__data__["Files"] @Files.setter def Files(self, value): """ nb files setter """ self.__data__["Files"] = len(self) @property def Status(self): """ status prop """ if not self.__data__["Status"]: self.__data__["Status"] = "Waiting" return self.__data__["Status"] @Status.setter def Status(self, value): """ status setter """ value = self._normalizedStatus(value.strip()) if value not in self._states: raise ValueError("Unknown FTSJob Status: '%s'" % str(value)) self.__data__["Status"] = value @property def FailedFiles(self): """ nb failed files getter """ self.__data__["FailedFiles"] = len([ ftsFile for ftsFile in self if ftsFile.Status in FTSFile.FAILED_STATES ]) 
return self.__data__["FailedFiles"] @FailedFiles.setter def FailedFiles(self, value): """ nb failed files setter """ if value: self.__data__["FailedFiles"] = value else: self.__data__["FailedFiles"] = sum([ ftsFile for ftsFile in self if ftsFile.Status in FTSFile.FAILED_STATES ]) @property def Size(self): """ size getter """ # if not self.__data__["Size"]: self.__data__["Size"] = sum([ftsFile.Size for ftsFile in self]) return self.__data__["Size"] @Size.setter def Size(self, value): """ size setter """ if value: self.__data__["Size"] = value else: self.__data__["Size"] = sum([ftsFile.Size for ftsFile in self]) @property def FailedSize(self): """ size getter """ if not self.__data__["FailedSize"]: self.__data__["FailedSize"] = sum([ ftsFile.Size for ftsFile in self if ftsFile.Status in FTSFile.FAILED_STATES ]) return self.__data__["FailedSize"] @FailedSize.setter def FailedSize(self, value): """ size setter """ if value: self.__data__["FailedSize"] = value else: self.__data__["FailedSize"] = sum([ ftsFile.Size for ftsFile in self if ftsFile.Status in FTSFile.FAILED_STATES ]) @property def CreationTime(self): """ creation time getter """ return self.__data__["CreationTime"] @CreationTime.setter def CreationTime(self, value=None): """ creation time setter """ if type(value) not in (datetime.datetime, str): raise TypeError("CreationTime should be a datetime.datetime!") if type(value) == str: value = datetime.datetime.strptime( value.split(".")[0], '%Y-%m-%d %H:%M:%S') self.__data__["CreationTime"] = value @property def SubmitTime(self): """ request's submission time getter """ return self.__data__["SubmitTime"] @SubmitTime.setter def SubmitTime(self, value=None): """ submission time setter """ if type(value) not in (datetime.datetime, str): raise TypeError("SubmitTime should be a datetime.datetime!") if type(value) == str: value = datetime.datetime.strptime( value.split(".")[0], '%Y-%m-%d %H:%M:%S') self.__data__["SubmitTime"] = value @property def LastUpdate(self): 
""" last update getter """ return self.__data__["LastUpdate"] @LastUpdate.setter def LastUpdate(self, value=None): """ last update setter """ if type(value) not in (datetime.datetime, str): raise TypeError("LastUpdate should be a datetime.datetime!") if type(value) == str: value = datetime.datetime.strptime( value.split(".")[0], '%Y-%m-%d %H:%M:%S') self.__data__["LastUpdate"] = value @property def TargetSE(self): """ target SE getter """ return self.__data__["TargetSE"] @TargetSE.setter def TargetSE(self, targetSE): """ target SE setter """ self.__data__["TargetSE"] = targetSE @property def SourceSE(self): """ source SE getter """ return self.__data__["SourceSE"] @SourceSE.setter def SourceSE(self, sourceSE): """ source SE setter """ self.__data__["SourceSE"] = sourceSE @property def SourceToken(self): """ source token getter """ return self.__data__["SourceToken"] @SourceToken.setter def SourceToken(self, sourceToken): """ source SE setter """ self.__data__["SourceToken"] = sourceToken @property def TargetToken(self): """ target token getter """ return self.__data__["TargetToken"] @TargetToken.setter def TargetToken(self, targetToken): """ target SE setter """ self.__data__["TargetToken"] = targetToken # # FTSJobFiles arithmetics def __contains__(self, subFile): """ in operator """ return subFile in self.__files__ def __iadd__(self, ftsFile): """ += operator """ if ftsFile not in self: self.__files__.append(ftsFile) ftsFile._parent = self self.Files self.Size return self def __add__(self, ftsFile): """ + operator """ self += ftsFile def addFile(self, ftsFile): """ add :ftsFile: to FTS job """ self += ftsFile def subFile(self, ftsFile): """ remove ftsFile from this job """ if ftsFile in self: ftsFile._parent = None self.__files__.remove(ftsFile) # # helpers for looping def __iter__(self): """ files iterator """ return self.__files__.__iter__() def __getitem__(self, i): """ [] op for files """ return self.__files__.__getitem__(i) def __delitem__(self, i): """ del 
ftsJob[i] """ self.__files__.__delitem__(i) def __setitem__(self, i, ftsFile): """ ftsJob[i] = ftsFile """ self.__files__.__setitem__(i, ftsFile) def fileStatusList(self): """ get list of files statuses """ return [ftsFile.Status for ftsFile in self] def __nonzero__(self): """ for comparisons """ return True def __len__(self): """ nb of subFiles """ return len(self.__files__) def _surlPairs(self): """ create and return SURL pair file """ surls = [] for ftsFile in self: checksum = "%s:%s" % ( ftsFile.ChecksumType, ftsFile.Checksum ) if ftsFile.ChecksumType and ftsFile.Checksum else "" surls.append("%s %s %s" % (ftsFile.SourceSURL, ftsFile.TargetSURL, checksum)) return "\n".join(surls) def submitFTS2(self, command='glite-transfer-submit', pinTime=False): """ submit fts job using FTS2 client """ if self.FTSGUID: return S_ERROR("FTSJob has already been submitted") surls = self._surlPairs() if not surls: return S_ERROR("No files to submit") fd, fileName = tempfile.mkstemp() surlFile = os.fdopen(fd, 'w') surlFile.write(surls) surlFile.close() submitCommand = command.split() + \ [ "-s", self.FTSServer, "-f", fileName, "-o", "-K" ] if self.TargetToken: submitCommand += ["-t", self.TargetToken] if self.SourceToken: submitCommand += ["-S", self.SourceToken] if pinTime: submitCommand += [ "--copy-pin-lifetime", "%d" % pinTime, "--bring-online", '86400' ] submit = executeGridCommand("", submitCommand) os.remove(fileName) if not submit["OK"]: return submit returnCode, output, errStr = submit["Value"] if returnCode != 0: return S_ERROR(errStr if errStr else output) self.FTSGUID = output.replace("\n", "") self.Status = "Submitted" for ftsFile in self: ftsFile.FTSGUID = self.FTSGUID ftsFile.Status = "Submitted" return S_OK() def _normalizedStatus(self, status): for st in self._states: if status.lower() == st.lower(): return st return status def monitorFTS2(self, command="glite-transfer-status", full=False): """ monitor fts job """ if not self.FTSGUID: return S_ERROR("FTSGUID not 
set, FTS job not submitted?") monitorCommand = command.split() + \ ["--verbose", "-s", self.FTSServer, self.FTSGUID ] if full: monitorCommand.append("-l") monitor = executeGridCommand("", monitorCommand) if not monitor["OK"]: return monitor returnCode, outputStr, errStr = monitor["Value"] # Returns a non zero status if error if returnCode != 0: if 'was not found' in outputStr and not errStr: errStr = 'Job was not found' return S_ERROR(errStr) outputStr = outputStr.replace("'", "").replace("<", "").replace(">", "") # # set FTS job status regExp = re.compile("Status:\\s+(\\S+)") # with FTS3 this can be uppercase self.Status = re.search(regExp, outputStr).group(1) statusSummary = {} # This is capitalized, even in FTS3! for state in FTSFile.ALL_STATES: regExp = re.compile("\\s+%s:\\s+(\\d+)" % state) if regExp.search(outputStr): statusSummary[state] = int( re.search(regExp, outputStr).group(1)) total = sum(statusSummary.values()) completed = sum( [statusSummary.get(state, 0) for state in FTSFile.FINAL_STATES]) self.Completeness = 100 * completed / total if total else 0 if not full: return S_OK(statusSummary) # The order of informations is not the same for glite- and fts- !!! 
# In order: new fts-, old fts-, glite- realJob = len(self) != 0 iExptr = None for iExptr, exptr in enumerate( ('[ ]+Source:[ ]+(\\S+)\n[ ]+Destination:[ ]+(\\S+)\n[ ]+State:[ ]+(\\S+)\n[ ]+Reason:[ ]+([\\S ]+).+?[ ]+Duration:[ ]+(\\d+)\n[ ]+Staging:[ ]+(\\d+)\n[ ]+Retries:[ ]+(\\d+)', '[ ]+Source:[ ]+(\\S+)\n[ ]+Destination:[ ]+(\\S+)\n[ ]+State:[ ]+(\\S+)\n[ ]+Reason:[ ]+([\\S ]+).+?[ ]+Duration:[ ]+(\\d+)\n[ ]+Retries:[ ]+(\\d+)', '[ ]+Source:[ ]+(\\S+)\n[ ]+Destination:[ ]+(\\S+)\n[ ]+State:[ ]+(\\S+)\n[ ]+Retries:[ ]+(\\d+)\n[ ]+Reason:[ ]+([\\S ]+).+?[ ]+Duration:[ ]+(\\d+)' )): regExp = re.compile(exptr, re.S) fileInfo = re.findall(regExp, outputStr) if fileInfo: break if not fileInfo: return S_ERROR("Error monitoring job (no regexp match)") for info in fileInfo: if iExptr == 0: # version >= 3.2.30 sourceURL, targetURL, fileStatus, reason, duration, _retries, _staging = info elif iExptr == 1: # version FTS3 < 3.2.30 sourceURL, targetURL, fileStatus, reason, duration, _retries = info elif iExptr == 2: # version FTS2 sourceURL, targetURL, fileStatus, _retries, reason, duration = info else: return S_ERROR('Error monitoring job (implement match %d)' % iExptr) candidateFile = None if not realJob: # This is used by the CLI monitoring of jobs in case no file was specified candidateFile = FTSFile() candidateFile.LFN = overlap(sourceURL, targetURL) candidateFile.SourceSURL = sourceURL candidateFile.Size = 0 self += candidateFile else: for ftsFile in self: if ftsFile.SourceSURL == sourceURL: candidateFile = ftsFile break if not candidateFile: continue # Can be uppercase for FTS3 if not candidateFile.TargetSURL: candidateFile.TargetSURL = targetURL candidateFile.Status = fileStatus candidateFile.Error = reason candidateFile._duration = duration if candidateFile.Status == "Failed": for missingSource in self.missingSourceErrors: if missingSource.match(reason): candidateFile.Error = "MissingSource" # If the staging info was present, record it if len(info) > 6: 
candidateFile._staging = info[6] # # register successful files if self.Status in FTSJob.FINALSTATES: return self.finalize() return S_OK() def submitFTS3(self, pinTime=False): """ submit fts job using FTS3 rest API """ if self.FTSGUID: return S_ERROR("FTSJob already has been submitted") transfers = [] for ftsFile in self: trans = fts3.new_transfer(ftsFile.SourceSURL, ftsFile.TargetSURL, checksum=ftsFile.Checksum, filesize=ftsFile.Size) transfers.append(trans) source_spacetoken = self.SourceToken if self.SourceToken else None dest_spacetoken = self.TargetToken if self.TargetToken else None copy_pin_lifetime = pinTime if pinTime else None bring_online = 86400 if pinTime else None job = fts3.new_job(transfers=transfers, overwrite=True, source_spacetoken=source_spacetoken, spacetoken=dest_spacetoken, bring_online=bring_online, copy_pin_lifetime=copy_pin_lifetime, retry=3) try: context = fts3.Context(self.FTSServer) self.FTSGUID = fts3.submit(context, job) except Exception, e: return S_ERROR("Error at submission: %s" % e) self.Status = "Submitted" self._log = gLogger.getSubLogger( "req_%s/FTSJob-%s" % (self.RequestID, self.FTSGUID), True) for ftsFile in self: ftsFile.FTSGUID = self.FTSGUID ftsFile.Status = "Submitted" return S_OK()