def __init__( self, systemInstance = 'Default', maxQueueSize = 10 ):
    """Open the StorageManagementDB connection and define the column sets
    used for task, replica and stage records.

    :param str systemInstance: unused, kept for signature compatibility
    :param int maxQueueSize: query queue size
    """
    DB.__init__( self, 'StorageManagementDB',
                 'StorageManagement/StorageManagementDB', maxQueueSize )
    self.lock = threading.Lock()
    # Column names for each of the three record families.
    self.TASKPARAMS = ['TaskID', 'Status', 'Source', 'SubmitTime', 'LastUpdate',
                       'CompleteTime', 'CallBackMethod', 'SourceTaskID']
    self.REPLICAPARAMS = ['ReplicaID', 'Type', 'Status', 'SE', 'LFN', 'PFN',
                          'Size', 'FileChecksum', 'GUID', 'SubmitTime',
                          'LastUpdate', 'Reason', 'Links']
    self.STAGEPARAMS = ['ReplicaID', 'StageStatus', 'RequestID',
                        'StageRequestSubmitTime', 'StageRequestCompletedTime',
                        'PinLength', 'PinExpiryTime']
    # Valid replica states.
    self.STATES = ['Failed', 'New', 'Waiting', 'Offline', 'StageSubmitted', 'Staged']
def __init__(self, maxQueueSize=10):
    """Open the GlastAdditionalInfoDB and ensure its tag/site relation table exists."""
    self.dbname = 'GlastAdditionalInfoDB'
    self.logger = gLogger.getSubLogger('GlastAdditionalInfoDB')
    DB.__init__(self, self.dbname, 'ResourceStatus/GlastAdditionalInfoDB', maxQueueSize)
    self.fields = ["CEName", "Status", "Software_Tag"]
    relationSpec = {
        "Fields": {
            "idRelation": "INT NOT NULL AUTO_INCREMENT",
            "CEName": "VARCHAR(45) NOT NULL",
            "Status": "ENUM('New','Installing','Valid','Bad','Removed') DEFAULT 'New'",
            "Software_Tag": "VARCHAR(255) NOT NULL",
            "LastUpdateTime": "DATETIME",
        },
        "PrimaryKey": ['idRelation'],
        'Indexes': {"Index": ["idRelation", "Software_Tag", "CEName", 'Status']},
    }
    self._createTables({"SoftwareTags_has_Sites": relationSpec})
    self.vo = getVO('glast.org')
    # Life-cycle states a software tag can be in.
    self.tag_statuses = ['New', 'Installing', 'Valid', 'Bad', 'Removed']
def __init__( self, maxQueueSize = 10 ):
    """Open the SandboxMetadataDB connection and create any missing tables.

    :param int maxQueueSize: query queue size
    :raises RuntimeError: if the tables cannot be created
    """
    DB.__init__( self, 'SandboxMetadataDB', 'WorkloadManagement/SandboxMetadataDB', maxQueueSize )
    result = self.__initializeDB()
    if not result[ 'OK' ]:
        # RuntimeError (a subclass of Exception, so callers catching Exception
        # still work) for consistency with the sibling constructors in this file.
        raise RuntimeError( "Can't create tables: %s" % result[ 'Message' ] )
    # Grace periods (days) before assigned / unassigned sandboxes expire.
    self.__assignedSBGraceDays = 0
    self.__unassignedSBGraceDays = 15
def __init__( self ):
    """Connect to SystemLoggingDB and make sure its tables exist."""
    DB.__init__( self, 'SystemLoggingDB', 'Framework/SystemLoggingDB', debug = DEBUG )
    check = self._checkTable()
    if not check['OK']:
        # Non-fatal: log the problem and carry on with whatever schema exists.
        gLogger.error( 'Failed to check/create the database tables', check['Message'] )
def __init__( self ):
    """Connect to SandboxMetadataDB and create any missing tables."""
    DB.__init__( self, 'SandboxMetadataDB', 'WorkloadManagement/SandboxMetadataDB' )
    res = self.__initializeDB()
    if not res[ 'OK' ]:
        raise RuntimeError( "Can't create tables: %s" % res[ 'Message' ] )
    # Days before assigned / unassigned sandboxes may be reaped.
    self.__assignedSBGraceDays = 0
    self.__unassignedSBGraceDays = 15
def __init__(self):
    """Connect to SandboxMetadataDB, initialising the schema on the way."""
    DB.__init__(self, "SandboxMetadataDB", "WorkloadManagement/SandboxMetadataDB")
    res = self.__initializeDB()
    if not res["OK"]:
        raise RuntimeError("Can't create tables: %s" % res["Message"])
    # Grace periods (in days) used by the sandbox clean-up logic.
    self.__assignedSBGraceDays = 0
    self.__unassignedSBGraceDays = 15
def __init__( self ):
    """Open the OverlayDB and load the per-site concurrent-job limits."""
    self.ops = Operations()
    self.dbname = 'OverlayDB'
    self.logger = gLogger.getSubLogger('OverlayDB')
    DB.__init__( self, self.dbname, 'Overlay/OverlayDB' )
    self._createTables( { "OverlayData" : { 'Fields' : { 'Site' : "VARCHAR(255) UNIQUE NOT NULL",
                                                         'NumberOfJobs' : "INTEGER DEFAULT 0" },
                                            'PrimaryKey' : 'Site',
                                            'Indexes': {'Index':['Site']} } } )
    # Global cap first; per-site overrides below.
    self.limits = {}
    self.limits["default"] = self.ops.getValue("/Overlay/MaxConcurrentRunning", 200)
    sections = self.ops.getSections("/Overlay/Sites/")
    siteNames = sections['Value'] if sections['OK'] else []
    for siteName in siteNames:
        self.limits[siteName] = self.ops.getValue("/Overlay/Sites/%s/MaxConcurrentRunning" % siteName, 200)
    self.logger.info("Using the following restrictions : %s" % self.limits)
def __init__(self, parentLogger=None):
    """Open the JobLoggingDB connection.

    :param parentLogger: optional logger this DB's sub-logger attaches to
    """
    DB.__init__(self, "JobLoggingDB", "WorkloadManagement/JobLoggingDB", parentLogger=parentLogger)
def __init__(self, parentLogger=None):
    """Open ComponentMonitoringDB, build the schema and record the field groupings.

    :param parentLogger: optional logger this DB's sub-logger attaches to
    :raises Exception: if the tables cannot be created
    """
    DB.__init__(self, "ComponentMonitoringDB", "Framework/ComponentMonitoringDB", parentLogger=parentLogger)
    res = self.__initializeDB()
    if not res["OK"]:
        raise Exception("Can't create tables: %s" % res["Message"])
    # Column groupings used when composing queries.
    self.__optionalFields = ("startTime", "cycles", "version", "queries",
                             "DIRACVersion", "description", "platform")
    self.__mainFields = ("Id", "Setup", "Type", "ComponentName", "Host", "Port",
                         "StartTime", "LastHeartbeat", "cycles", "queries", "LoggingState")
    self.__versionFields = ("VersionTimestamp", "Version", "DIRACVersion", "Platform", "Description")
def __init__(self):
    """Open the DataIntegrityDB and describe the Problematics table."""
    DB.__init__(self, 'DataIntegrityDB', 'DataManagement/DataIntegrityDB')
    self.tableName = 'Problematics'
    columns = {
        'FileID': 'INTEGER NOT NULL AUTO_INCREMENT',
        'Prognosis': 'VARCHAR(32) NOT NULL',
        'LFN': 'VARCHAR(255) NOT NULL',
        'PFN': 'VARCHAR(255)',
        'Size': 'BIGINT(20)',
        'SE': 'VARCHAR(32)',
        'GUID': 'VARCHAR(255)',
        'Status': 'VARCHAR(32) DEFAULT "New"',
        'Retries': 'INTEGER DEFAULT 0',
        'InsertDate': 'DATETIME NOT NULL',
        'LastUpdate': 'DATETIME NOT NULL',
        'Source': 'VARCHAR(127) NOT NULL DEFAULT "Unknown"',
    }
    self.tableDict = {self.tableName: {'Fields': columns,
                                       'PrimaryKey': 'FileID',
                                       'Indexes': {'PS': ['Prognosis', 'Status']},
                                       'Engine': 'InnoDB'}}
    # Columns returned by the standard lookup queries.
    self.fieldList = ['FileID', 'LFN', 'PFN', 'Size', 'SE', 'GUID', 'Prognosis']
def __init__(self, maxQueueSize=10):
    """Open the JobLoggingDB connection and keep a handle to the global logger."""
    DB.__init__(self, 'JobLoggingDB', 'WorkloadManagement/JobLoggingDB', maxQueueSize)
    self.gLogger = gLogger
def __init__( self, databaseLocation='DataManagement/FileCatalogDB', maxQueueSize=10 ):
    """Open the FileCatalogDB connection and create its tables.

    :param str databaseLocation: either 'System/Database' or a bare database
        name, which is then assumed to live in the DataManagement system
    :param int maxQueueSize: query queue size
    """
    db = databaseLocation
    if '/' not in db:  # idiomatic membership test instead of find() == -1
        db = 'DataManagement/' + db
    DB.__init__(self, 'FileCatalogDB', db, maxQueueSize)
    result = self._createTables( self.__tables )
    if not result['OK']:
        gLogger.error( "Failed to create tables", str( self.__tables.keys() ) )
    elif result['Value']:
        gLogger.info( "Tables created: %s" % ','.join( result['Value'] ) )
    # Manager objects start empty — presumably injected later; TODO confirm.
    self.ugManager = None
    self.seManager = None
    self.securityManager = None
    self.dtree = None
    self.fileManager = None
    self.dmeta = None
    self.fmeta = None
    self.statusDict = {}
def __init__(self, *args, **kwargs):
    """Build the ResourceManagementDB accessor.

    Positional arguments mimic the legacy (systemInstance, maxQueueSize)
    signature; only maxQueueSize is actually used.  A 'DBin' keyword can
    inject an existing connection, or a [user, password] pair for a local
    MySQL connection.
    """
    # Bind the default first so maxQueueSize is always defined — the original
    # left it unbound (NameError) for unexpected argument patterns.
    maxQueueSize = 10
    if len(args) == 1:
        if isinstance(args[0], int):
            maxQueueSize = args[0]
    elif len(args) == 2:
        maxQueueSize = args[1]
    if 'DBin' in kwargs:
        DBin = kwargs['DBin']
        if isinstance(DBin, list):
            from DIRAC.Core.Utilities.MySQL import MySQL
            self.db = MySQL('localhost', DBin[0], DBin[1], 'ResourceManagementDB')
        else:
            self.db = DBin
    else:
        from DIRAC.Core.Base.DB import DB
        self.db = DB('ResourceManagementDB','ResourceStatus/ResourceManagementDB',maxQueueSize)
def __init__( self, maxQueueSize = 10 ):
    """Open SandboxMetadataDB with a bounded query queue and build the schema.

    :param int maxQueueSize: query queue size
    :raises RuntimeError: if the tables cannot be created
    """
    DB.__init__( self, 'SandboxMetadataDB', 'WorkloadManagement/SandboxMetadataDB', maxQueueSize )
    initRes = self.__initializeDB()
    if not initRes[ 'OK' ]:
        raise RuntimeError( "Can't create tables: %s" % initRes[ 'Message' ] )
    self.__assignedSBGraceDays = 0
    self.__unassignedSBGraceDays = 15
def __init__( self, systemInstance = 'Default', maxQueueSize = 10 ):
    """Open the ReqDB connection.

    :param str systemInstance: unused, kept for signature compatibility
    :param int maxQueueSize: query queue size
    """
    self.getIdLock = threading.Lock()
    DB.__init__( self, "ReqDB", "RequestManagement/ReqDB", maxQueueSize )
def __init__( self ):
    """Open the DataIntegrityDB and record the Problematics table schema."""
    DB.__init__( self, 'DataIntegrityDB', 'DataManagement/DataIntegrityDB' )
    self.tableName = 'Problematics'
    schema = {'Fields': {'FileID': 'INTEGER NOT NULL AUTO_INCREMENT',
                         'Prognosis': 'VARCHAR(32) NOT NULL',
                         'LFN': 'VARCHAR(255) NOT NULL',
                         'PFN': 'VARCHAR(255)',
                         'Size': 'BIGINT(20)',
                         'SE': 'VARCHAR(32)',
                         'GUID': 'VARCHAR(255)',
                         'Status': 'VARCHAR(32) DEFAULT "New"',
                         'Retries': 'INTEGER DEFAULT 0',
                         'InsertDate': 'DATETIME NOT NULL',
                         'LastUpdate': 'DATETIME NOT NULL',
                         'Source': 'VARCHAR(127) NOT NULL DEFAULT "Unknown"'},
              'PrimaryKey': 'FileID',
              'Indexes': {'PS': ['Prognosis', 'Status']},
              'Engine': 'InnoDB'}
    self.tableDict = {self.tableName: schema}
    # Columns returned by the standard lookup queries.
    self.fieldList = ['FileID', 'LFN', 'PFN', 'Size', 'SE', 'GUID', 'Prognosis']
def __init__(self, maxQueueSize=10):
    """Open the NotificationDB and define the alarm/notification field sets."""
    DB.__init__(self, 'NotificationDB', 'Framework/NotificationDB', maxQueueSize)
    initRes = self.__initializeDB()
    if not initRes['OK']:
        self.log.fatal("Cannot initialize DB!", initRes['Message'])
    # Columns returned by alarm queries and kept in the alarm log.
    self.__alarmQueryFields = ['alarmid', 'author', 'creationtime', 'modtime',
                               'subject', 'status', 'priority', 'notifications',
                               'body', 'assignee', 'alarmkey']
    self.__alarmLogFields = ['timestamp', 'author', 'comment', 'modifications']
    self.__notificationQueryFields = ('id', 'user', 'seen', 'message', 'timestamp')
    # Validation sets for creating and updating alarms.
    self.__newAlarmMandatoryFields = ['author', 'subject', 'status',
                                     'notifications', 'body', 'assignee', 'priority']
    self.__updateAlarmIdentificationFields = ['id', 'alarmKey']
    self.__updateAlarmMandatoryFields = ['author']
    self.__updateAlarmAtLeastOneField = ['comment', 'modifications']
    self.__updateAlarmModificableFields = ['status', 'assignee', 'priority']
    self.__validAlarmStatus = ['Open', 'OnGoing', 'Closed', 'Testing']
    self.__validAlarmNotifications = ['Web', 'Mail', 'SMS']
    self.__validAlarmPriorities = ['Low', 'Medium', 'High', 'Extreme']
def __init__(self, maxQueueSize=10):
    """Open the GlastAdditionalInfoDB and make sure the tag/site table exists."""
    self.dbname = "GlastAdditionalInfoDB"
    self.logger = gLogger.getSubLogger("GlastAdditionalInfoDB")
    DB.__init__(self, self.dbname, "ResourceStatus/GlastAdditionalInfoDB", maxQueueSize)
    self.fields = ["CEName", "Status", "Software_Tag"]
    relation = {
        "Fields": {
            "idRelation": "INT NOT NULL AUTO_INCREMENT",
            "CEName": "VARCHAR(45) NOT NULL",
            "Status": "ENUM('New','Installing','Valid','Bad','Removed') DEFAULT 'New'",
            "Software_Tag": "VARCHAR(255) NOT NULL",
            "LastUpdateTime": "DATETIME",
        },
        "PrimaryKey": ["idRelation"],
        "Indexes": {"Index": ["idRelation", "Software_Tag", "CEName", "Status"]},
    }
    self._createTables({"SoftwareTags_has_Sites": relation})
    self.vo = getVO("glast.org")
    # Life-cycle states a software tag can be in.
    self.tag_statuses = ["New", "Installing", "Valid", "Bad", "Removed"]
def __init__( self, maxQueueSize = 10 ):
    """Open the OverlayDB and read the per-site running-job limits."""
    self.ops = Operations()
    self.dbname = 'OverlayDB'
    self.logger = gLogger.getSubLogger('OverlayDB')
    DB.__init__( self, self.dbname, 'Overlay/OverlayDB', maxQueueSize )
    self._createTables( { "OverlayData" : { 'Fields' : { 'Site' : "VARCHAR(256) UNIQUE NOT NULL",
                                                         'NumberOfJobs' : "INTEGER DEFAULT 0" },
                                            'PrimaryKey' : 'Site',
                                            'Indexes': {'Index':['Site']} } } )
    self.limits = {}
    # Global default cap, then per-site overrides from the configuration.
    self.limits["default"] = self.ops.getValue("/Overlay/MaxConcurrentRunning", 200)
    sitesRes = self.ops.getSections("/Overlay/Sites/")
    siteList = sitesRes['Value'] if sitesRes['OK'] else []
    for siteName in siteList:
        self.limits[siteName] = self.ops.getValue("/Overlay/Sites/%s/MaxConcurrentRunning" % siteName, 200)
    self.logger.info("Using the following restrictions : %s" % self.limits)
def __init__(self, *args, **kwargs):
    """Build the ResourceManagementDB accessor.

    Positional arguments mimic the legacy (systemInstance, maxQueueSize)
    signature; only maxQueueSize is actually used.  A 'DBin' keyword can
    inject an existing connection, or a [user, password] pair for a local
    MySQL connection.
    """
    # Bind the default first so maxQueueSize is always defined — the original
    # left it unbound (NameError) for unexpected argument patterns.
    maxQueueSize = 10
    if len(args) == 1:
        if isinstance(args[0], int):
            maxQueueSize = args[0]
    elif len(args) == 2:
        maxQueueSize = args[1]
    if 'DBin' in kwargs:
        DBin = kwargs['DBin']
        if isinstance(DBin, list):
            from DIRAC.Core.Utilities.MySQL import MySQL
            self.db = MySQL('localhost', DBin[0], DBin[1], 'ResourceManagementDB')
        else:
            self.db = DBin
    else:
        from DIRAC.Core.Base.DB import DB
        self.db = DB('ResourceManagementDB', 'ResourceStatus/ResourceManagementDB', maxQueueSize)
def __init__(self, maxQueueSize=10):
    """Open the SystemLoggingDB connection; debug flag comes from the module DEBUG."""
    DB.__init__(self, 'SystemLoggingDB', 'Framework/SystemLoggingDB', maxQueueSize, debug=DEBUG)
def __init__( self, maxQueueSize = 10 ):
    """Open VirtualMachineDB, verifying the connection and creating tables."""
    DB.__init__( self, 'VirtualMachineDB', 'WorkloadManagement/VirtualMachineDB', maxQueueSize )
    # _MySQL__initialized is the name-mangled MySQL.__initialized flag.
    if not self._MySQL__initialized:
        raise Exception( 'Can not connect to VirtualMachineDB, exiting...' )
    res = self.__initializeDB()
    if not res[ 'OK' ]:
        raise Exception( "Can't create tables: %s" % res[ 'Message' ] )
def __init__( self, maxQueueSize = 10 ):
    """Open the DataLoggingDB connection.

    :param int maxQueueSize: query queue size
    """
    DB.__init__( self, "DataLoggingDB", "DataManagement/DataLoggingDB", maxQueueSize )
    self.gLogger = gLogger
def __init__(self, dbname=None, dbconfig=None, dbIn=None):
    """Create the ProductionDB accessor.

    :param dbname: database name, defaults to 'ProductionDB'
    :param dbconfig: configuration section, defaults to 'Production/ProductionDB'
    :param dbIn: when truthy, skip opening a new DB connection
    """
    dbname = dbname or "ProductionDB"
    dbconfig = dbconfig or "Production/ProductionDB"
    if not dbIn:
        DB.__init__(self, dbname, dbconfig)
    self.lock = threading.Lock()
    self.prodValidator = ProdValidator()
    self.ProdTransManager = ProdTransManager()
    # Column sets used when serialising the various record types.
    self.PRODPARAMS = ["ProductionID", "ProductionName", "Description",
                       "CreationDate", "LastUpdate", "AuthorDN", "AuthorGroup", "Status"]
    self.TRANSPARAMS = ["TransformationID", "ProductionID", "LastUpdate", "InsertedTime"]
    self.TRANSLINKSPARAMS = ["TransformationID", "ParentTransformationID", "ProductionID"]
    self.PRODSTEPSPARAMS = ["StepID", "Name", "Description", "LongDescription",
                            "Body", "Type", "Plugin", "AgentType", "GroupSize",
                            "InputQuery", "OutputQuery", "LastUpdate", "InsertedTime"]
    # Transformation action triggered when a production enters each status.
    self.statusActionDict = {"New": None,
                             "Active": "startTransformation",
                             "Stopped": "stopTransformation",
                             "Cleaned": "cleanTransformation"}
def __init__( self, systemInstance = 'Default', maxQueueSize = 10 ):
    """Open the RequestDB connection.

    :param str systemInstance: unused, kept for signature compatibility
    :param int maxQueueSize: query queue size
    """
    DB.__init__( self, 'RequestDB', 'RequestManagement/RequestDB', maxQueueSize )
    self.getIdLock = threading.Lock()
def __init__(self):
    """Open UserProfileDB and build its tables.

    :raises Exception: if the tables cannot be created
    """
    # Permission vocabulary used by the access-control checks.
    self.__permValues = ['USER', 'GROUP', 'VO', 'ALL']
    self.__permAttrs = ['ReadAccess', 'PublishAccess']
    DB.__init__(self, 'UserProfileDB', 'Framework/UserProfileDB')
    res = self.__initializeDB()
    if not res['OK']:
        raise Exception("Can't create tables: %s" % res['Message'])
def __init__(self):
    """Open UserProfileDB (queue size 10) and create any missing tables."""
    self.__permValues = ["USER", "GROUP", "VO", "ALL"]
    self.__permAttrs = ["ReadAccess", "PublishAccess"]
    DB.__init__(self, "UserProfileDB", "Framework/UserProfileDB", 10)
    res = self.__initializeDB()
    if not res["OK"]:
        raise Exception("Can't create tables: %s" % res["Message"])
def __init__(self, maxQueueSize=10):
    """Open VirtualMachineDB, verifying the connection and creating tables."""
    DB.__init__(self, 'VirtualMachineDB', 'WorkloadManagement/VirtualMachineDB', maxQueueSize)
    # _MySQL__initialized is the name-mangled MySQL.__initialized flag.
    if not self._MySQL__initialized:
        raise Exception('Can not connect to VirtualMachineDB, exiting...')
    res = self.__initializeDB()
    if not res['OK']:
        raise Exception("Can't create tables: %s" % res['Message'])
def __init__(self):
    """Open UserProfileDB with a small TTL cache for lookups."""
    self.__permValues = ["USER", "GROUP", "VO", "ALL"]
    self.__permAttrs = ["ReadAccess", "PublishAccess"]
    # Up to 1024 cached entries, each expiring after 15 seconds.
    self.__cache = cachetools.TTLCache(1024, 15)
    DB.__init__(self, "UserProfileDB", "Framework/UserProfileDB")
    res = self.__initializeDB()
    if not res["OK"]:
        raise Exception("Can't create tables: %s" % res["Message"])
def __init__( self ):
    """Open UserProfileDB and create any missing tables."""
    # Permission vocabulary used by the access-control checks.
    self.__permValues = [ 'USER', 'GROUP', 'VO', 'ALL' ]
    self.__permAttrs = [ 'ReadAccess', 'PublishAccess' ]
    DB.__init__(self, 'UserProfileDB', 'Framework/UserProfileDB')
    initRes = self.__initializeDB()
    if not initRes[ 'OK' ]:
        raise Exception( "Can't create tables: %s" % initRes[ 'Message' ] )
def __init__( self, maxQueueSize = 10 ):
    """Open BigDataDB, verifying the connection and creating tables."""
    DB.__init__( self, 'BigDataDB', 'WorkloadManagement/BigDataDB', maxQueueSize )
    # _MySQL__initialized is the name-mangled MySQL.__initialized flag.
    if not self._MySQL__initialized:
        raise Exception( 'Can not connect to BigDataDB, exiting...' )
    res = self.__initializeDB()
    if not res[ 'OK' ]:
        raise Exception( 'Can\'t create tables: %s' % res[ 'Message' ] )
def __init__(self, parentLogger=None):
    """Open SandboxMetadataDB, wiring in an optional parent logger.

    :raises RuntimeError: if the tables cannot be created
    """
    DB.__init__(self, "SandboxMetadataDB", "WorkloadManagement/SandboxMetadataDB", parentLogger=parentLogger)
    res = self.__initializeDB()
    if not res["OK"]:
        raise RuntimeError(f"Can't create tables: {res['Message']}")
    # Grace periods (days) before sandboxes may be reaped.
    self.__assignedSBGraceDays = 0
    self.__unassignedSBGraceDays = 15
def __init__( self, databaseLocation='DataManagement/FileCatalogDB', maxQueueSize=10 ):
    """Open the FileCatalogDB connection.

    :param str databaseLocation: either 'System/Database' or a bare database
        name, which is then assumed to live in the DataManagement system
    :param int maxQueueSize: query queue size
    """
    db = databaseLocation
    if '/' not in db:  # idiomatic membership test instead of find() == -1
        db = 'DataManagement/' + db
    DB.__init__(self, 'FileCatalogDB', db, maxQueueSize)
def __init__(self):
    """Open the DataIntegrityDB and build the Problematics table."""
    DB.__init__(self, "DataIntegrityDB", "DataManagement/DataIntegrityDB")
    self.tableName = "Problematics"
    # Columns returned by the standard lookup queries.
    self.fieldList = ["FileID", "LFN", "PFN", "Size", "SE", "GUID", "Prognosis"]
    res = self.__initializeDB()
    if not res["OK"]:
        raise Exception("Can't create tables: %s" % res["Message"])
def __init__(self, dbname=None, dbconfig=None, dbIn=None):
    """Create the ProductionDB accessor.

    :param dbname: database name, defaults to 'ProductionDB'
    :param dbconfig: configuration section, defaults to 'Production/ProductionDB'
    :param dbIn: when truthy, skip opening a new DB connection
    """
    dbname = dbname or 'ProductionDB'
    dbconfig = dbconfig or 'Production/ProductionDB'
    if not dbIn:
        DB.__init__(self, dbname, dbconfig)
    self.lock = threading.Lock()
    self.prodValidator = ProdValidator()
    self.ProdTransManager = ProdTransManager()
    # Column sets used when serialising the various record types.
    self.PRODPARAMS = ['ProductionID', 'ProductionName', 'Description',
                       'CreationDate', 'LastUpdate', 'AuthorDN', 'AuthorGroup', 'Status']
    self.TRANSPARAMS = ['TransformationID', 'ProductionID', 'LastUpdate', 'InsertedTime']
    self.TRANSLINKSPARAMS = ['TransformationID', 'ParentTransformationID', 'ProductionID']
    self.PRODSTEPSPARAMS = ['StepID', 'Name', 'Description', 'LongDescription',
                            'Body', 'Type', 'Plugin', 'AgentType', 'GroupSize',
                            'InputQuery', 'OutputQuery', 'LastUpdate', 'InsertedTime']
    # Transformation action triggered when a production enters each status.
    self.statusActionDict = {'New': None,
                             'Active': 'startTransformation',
                             'Stopped': 'stopTransformation',
                             'Cleaned': 'cleanTransformation'}
def __init__(self, systemInstance='Default', maxQueueSize=10):
    """Open the RequestDB connection.

    :param str systemInstance: unused, kept for signature compatibility
    :param int maxQueueSize: query queue size
    """
    DB.__init__(self, 'RequestDB', 'RequestManagement/RequestDB', maxQueueSize)
    self.getIdLock = threading.Lock()
def __init__(self):
    """Open the DataIntegrityDB and build the Problematics table."""
    DB.__init__(self, 'DataIntegrityDB', 'DataManagement/DataIntegrityDB')
    self.tableName = 'Problematics'
    # Columns returned by the standard lookup queries.
    self.fieldList = ['FileID', 'LFN', 'PFN', 'Size', 'SE', 'GUID', 'Prognosis']
    initRes = self.__initializeDB()
    if not initRes['OK']:
        raise Exception("Can't create tables: %s" % initRes['Message'])
def __init__(self):
    """Connect to SystemLoggingDB and make sure its tables exist."""
    DB.__init__(self, 'SystemLoggingDB', 'Framework/SystemLoggingDB', debug=DEBUG)
    check = self._checkTable()
    if not check['OK']:
        # Non-fatal: log the problem and carry on with whatever schema exists.
        gLogger.error('Failed to check/create the database tables', check['Message'])
def __init__(self, mySQL=None):
    """Build the ResourceManagementDB accessor.

    :param mySQL: optional pre-built DB/MySQL connection (mostly for tests);
        when None a fresh DB connection is opened.
    """
    self._tableDict = self.__generateTables()
    if mySQL is None:
        self.database = DB('ResourceManagementDB', 'ResourceStatus/ResourceManagementDB')
    else:
        self.database = mySQL
def __init__( self, requireVoms = False, useMyProxy = False ):
    """Open ComponentMonitoringDB and record the field groupings.

    :param requireVoms: accepted for signature compatibility; not used here
    :param useMyProxy: accepted for signature compatibility; not used here
    :raises Exception: if the tables cannot be created
    """
    DB.__init__( self, 'ComponentMonitoringDB', 'Framework/ComponentMonitoringDB' )
    random.seed()
    res = self.__initializeDB()
    if not res[ 'OK' ]:
        raise Exception( "Can't create tables: %s" % res[ 'Message' ] )
    # Column groupings used when composing queries.
    self.__optionalFields = ( 'startTime', 'cycles', 'version', 'queries',
                              'DIRACVersion', 'description', 'platform' )
    self.__mainFields = ( "Id", "Setup", "Type", "ComponentName", "Host", "Port",
                          "StartTime", "LastHeartbeat", "cycles", "queries", "LoggingState" )
    self.__versionFields = ( 'VersionTimestamp', 'Version', 'DIRACVersion', 'Platform', 'Description' )
def __init__( self, useMyProxy = False, maxQueueSize = 10 ):
    """Open ComponentMonitoringDB and record the field groupings.

    :param useMyProxy: accepted for signature compatibility; not used here
    :param int maxQueueSize: query queue size
    :raises Exception: if the tables cannot be created
    """
    DB.__init__( self, 'ComponentMonitoringDB', 'Framework/ComponentMonitoringDB', maxQueueSize )
    random.seed()
    res = self.__initializeDB()
    if not res[ 'OK' ]:
        raise Exception( "Can't create tables: %s" % res[ 'Message' ] )
    # Column groupings used when composing queries.
    self.__optionalFields = ( 'startTime', 'cycles', 'version', 'queries',
                              'DIRACVersion', 'description', 'platform' )
    self.__mainFields = ( "Id", "Setup", "Type", "ComponentName", "Host", "Port",
                          "StartTime", "LastHeartbeat", "cycles", "queries", "LoggingState" )
    self.__versionFields = ( 'VersionTimestamp', 'Version', 'DIRACVersion', 'Platform', 'Description' )
def __init__(self, requireVoms=False, useMyProxy=False, maxQueueSize=10):
    """Open the ProxyDB and set proxy/token policy defaults."""
    DB.__init__(self, 'ProxyDB', 'Framework/ProxyDB', maxQueueSize)
    random.seed()
    self.__defaultRequestLifetime = 300          # 5 minutes
    self.__defaultTokenLifetime = 86400 * 7      # 1 week
    self.__defaultTokenMaxUses = 50
    self.__vomsRequired = requireVoms
    self.__useMyProxy = useMyProxy
    # Minimum remaining validity (seconds) for storing a proxy — per the
    # attribute name; confirm against the store logic.
    self._minSecsToAllowStore = 3600
    res = self.__initializeDB()
    if not res['OK']:
        raise Exception("Can't create tables: %s" % res['Message'])
def __init__( self, sandbox_type, maxQueueSize=10 ):
    """Open a SandboxDB connection and read the size limits from configuration.

    :param str sandbox_type: database name passed through to DB.__init__
    :param int maxQueueSize: query queue size
    """
    DB.__init__(self, sandbox_type, 'WorkloadManagement/SandboxDB', maxQueueSize)
    self.maxSize = gConfig.getValue( self.cs_path + '/MaxSandboxSize', 16 )
    self.maxPartitionSize = gConfig.getValue( self.cs_path + '/MaxPartitionSize', 2 )
    self.maxPartitionSize *= 1024 * 1024 * 1024  # GB -> bytes
    # BUGFIX: previously re-read '/MaxPartitionSize' (copy-paste); the
    # variable name and the distinct default (200) show the intended key.
    self.maxSizeToRecover = gConfig.getValue( self.cs_path + '/MaxSizeToRecover', 200 )
    self.maxSizeToRecover *= 1024 * 1024  # MB -> bytes
    self.lock = threading.Lock()
def __init__( self, systemInstance = "Default" ):
    """Open the FTSDB connection.

    :param str systemInstance: unused, kept for signature compatibility
    """
    DB.__init__( self, "FTSDB", "DataManagement/FTSDB" )
    # Private lock obtained from the LockRing singleton.
    self.getIdLock = LockRing().getLock( "FTSDBLock" )
    # Maximum number of reschedule attempts.
    self.maxAttempt = 100
def __init__(self, systemInstance="Default"):
    """Open the FTSDB connection.

    :param str systemInstance: unused, kept for signature compatibility
    """
    DB.__init__(self, "FTSDB", "DataManagement/FTSDB")
    # Private lock obtained from the LockRing singleton.
    self.getIdLock = LockRing().getLock("FTSDBLock")
    # Maximum number of reschedule attempts.
    self.maxAttempt = 100
def __init__( self, requireVoms = False, useMyProxy = False, maxQueueSize = 10 ):
    """Open the ProxyDB and set proxy/token policy defaults."""
    DB.__init__( self, 'ProxyDB', 'Framework/ProxyDB', maxQueueSize )
    random.seed()
    self.__defaultRequestLifetime = 300        # 5 minutes
    self.__defaultTokenLifetime = 86400 * 7    # 1 week
    self.__defaultTokenMaxUses = 50
    self.__vomsRequired = requireVoms
    self.__useMyProxy = useMyProxy
    # Minimum remaining validity (seconds) for storing a proxy — per the
    # attribute name; confirm against the store logic.
    self._minSecsToAllowStore = 3600
    res = self.__initializeDB()
    if not res[ 'OK' ]:
        raise Exception( "Can't create tables: %s" % res[ 'Message' ] )
def __init__(self, useMyProxy=False):
    """Open the ProxyDB, purge expired proxies and check the schema version."""
    DB.__init__(self, 'ProxyDB', 'Framework/ProxyDB')
    random.seed()
    self.__defaultRequestLifetime = 300         # 5 minutes
    self.__defaultTokenLifetime = 86400 * 7     # 1 week
    self.__defaultTokenMaxUses = 50
    self.__useMyProxy = useMyProxy
    self._minSecsToAllowStore = 3600
    self.__notifClient = NotificationClient()
    res = self.__initializeDB()
    if not res['OK']:
        raise Exception("Can't create tables: %s" % res['Message'])
    # Start from a clean state; notifications suppressed during this purge.
    self.purgeExpiredProxies(sendNotifications=False)
    self.__checkDBVersion()
def __init__( self, maxQueueSize = 10 ):
    """Open the GlastAdditionnalInfoDB and ensure the tag/site relation table exists."""
    # NOTE(review): 'Additionnal' spelling is kept — it is the actual DB name.
    self.dbname = 'GlastAdditionnalInfoDB'
    self.logger = gLogger.getSubLogger('GlastAdditionnalInfoDB')
    DB.__init__( self, self.dbname, 'Glast/GlastAdditionnalInfoDB', maxQueueSize )
    relationTable = { "Fields" : { "idRelation" : "INT NOT NULL AUTO_INCREMENT",
                                   "SiteName" : "VARCHAR(45) NOT NULL",
                                   "Software_Tag" : "VARCHAR(60) NOT NULL" },
                      "PrimaryKey" : ['idRelation'],
                      'Indexes' : { "Index" : ["idRelation", "Software_Tag", "SiteName"] } }
    self._createTables( { "SoftwareTags_has_Sites" : relationTable } )
def __init__( self, useMyProxy = False ):
    """Open the ProxyDB, purge expired proxies and check the schema version."""
    DB.__init__( self, 'ProxyDB', 'Framework/ProxyDB' )
    random.seed()
    self.__defaultRequestLifetime = 300        # 5 minutes
    self.__defaultTokenLifetime = 86400 * 7    # 1 week
    self.__defaultTokenMaxUses = 50
    self.__useMyProxy = useMyProxy
    self._minSecsToAllowStore = 3600
    self.__notifClient = NotificationClient()
    res = self.__initializeDB()
    if not res[ 'OK' ]:
        raise Exception( "Can't create tables: %s" % res[ 'Message' ] )
    # Start from a clean state; notifications suppressed during this purge.
    self.purgeExpiredProxies( sendNotifications = False )
    self.__checkDBVersion()
def _update( self, cmd, conn=False ):
    """Forward an SQL update to DB._update, timing it when DEBUG is set.

    :param cmd: SQL statement to execute
    :param conn: optional existing connection passed through to DB._update
    :return: whatever DB._update returns
    """
    start = time.time()
    ret = DB._update( self, cmd, conn )
    if DEBUG:
        # Python 2 print statement: append "<elapsed> <sql>" (newlines
        # stripped from the SQL) to the module-level debug file.
        print >> debugFile, time.time() - start, cmd.replace('\n','')
        debugFile.flush()
    return ret
def __init__( self, maxQueueSize = 10 ):
    """Open the ProcessDB and define the vocabularies used by its tables."""
    self.dbname = 'ProcessDB'
    DB.__init__( self, self.dbname, 'ProcessProduction/ProcessDB', maxQueueSize )
    # Fixed vocabularies referenced by the table contents.
    self.ProdTypes = ['MCGeneration', "MCSimulation", "MCReconstruction"]
    self.SoftwareParams = ['Path', 'Valid', 'AppName', 'AppVersion', 'Platform']
    self.ProcessDataParams = ['CrossSection', 'NbEvts', 'Path', 'Files', 'Polarisation']
    self.SiteStatuses = ['OK', 'Banned']
    self.Operations = ['Installation', 'Removal']
    self.OperationsStatus = ['Done', 'Running', 'Waiting', 'Failed']
    initRes = self.__initializeDB()
    if not initRes[ 'OK' ]:
        self.log.fatal( "Cannot initialize DB!", initRes[ 'Message' ] )
def __init__( self ):
    """Open the NotificationDB and define the alarm/notification field sets."""
    DB.__init__( self, 'NotificationDB', 'Framework/NotificationDB' )
    initRes = self.__initializeDB()
    if not initRes[ 'OK' ]:
        self.log.fatal( "Cannot initialize DB!", initRes[ 'Message' ] )
    # Columns returned by alarm queries and kept in the alarm log.
    self.__alarmQueryFields = [ 'alarmid', 'author', 'creationtime', 'modtime',
                                'subject', 'status', 'priority', 'notifications',
                                'body', 'assignee', 'alarmkey' ]
    self.__alarmLogFields = [ 'timestamp', 'author', 'comment', 'modifications' ]
    self.__notificationQueryFields = ( 'id', 'user', 'seen', 'message', 'timestamp' )
    # Validation sets for creating and updating alarms.
    self.__newAlarmMandatoryFields = [ 'author', 'subject', 'status',
                                       'notifications', 'body', 'assignee', 'priority' ]
    self.__updateAlarmIdentificationFields = [ 'id', 'alarmKey' ]
    self.__updateAlarmMandatoryFields = [ 'author' ]
    self.__updateAlarmAtLeastOneField = [ 'comment', 'modifications' ]
    self.__updateAlarmModificableFields = [ 'status', 'assignee', 'priority' ]
    self.__validAlarmStatus = [ 'Open', 'OnGoing', 'Closed', 'Testing' ]
    self.__validAlarmNotifications = [ 'Web', 'Mail', 'SMS' ]
    self.__validAlarmPriorities = [ 'Low', 'Medium', 'High', 'Extreme' ]
def __init__( self, maxQueueSize = 10 ):
    """Open the TaskQueueDB and set up the matching/share machinery.

    :raises Exception: if the tables cannot be created
    """
    random.seed()
    DB.__init__( self, 'TaskQueueDB', 'WorkloadManagement/TaskQueueDB', maxQueueSize )
    # Multi-valued queue-definition fields and their single match counterparts.
    self.__multiValueDefFields = ( 'Sites', 'GridCEs', 'GridMiddlewares', 'BannedSites',
                                   'LHCbPlatforms', 'PilotTypes', 'SubmitPools', 'JobTypes' )
    self.__multiValueMatchFields = ( 'GridCE', 'Site', 'GridMiddleware', 'LHCbPlatform',
                                     'PilotType', 'SubmitPool', 'JobType' )
    self.__bannedJobMatchFields = ( 'Site', )
    self.__singleValueDefFields = ( 'OwnerDN', 'OwnerGroup', 'Setup', 'CPUTime' )
    self.__mandatoryMatchFields = ( 'Setup', 'CPUTime' )
    self.__defaultCPUSegments = maxCPUSegments
    self.__maxMatchRetry = 3
    self.__jobPriorityBoundaries = ( 0.001, 10 )
    self.__groupShares = {}
    self.__csSection = "/Operations/Scheduling/%s/" % gConfig.getValue( "/DIRAC/Setup" )
    self.__ensureInsertionIsSingle = False
    self.__sharesCorrector = SharesCorrector( self.__csSection )
    initRes = self.__initializeDB()
    if not initRes[ 'OK' ]:
        raise Exception( "Can't create tables: %s" % initRes[ 'Message' ] )
def __init__(self, maxQueueSize=10, mySQL=None):
    """Build the ResourceManagementDB accessor.

    :param int maxQueueSize: query queue size for a freshly opened connection
    :param mySQL: optional pre-built DB/MySQL connection (mostly for tests)
    """
    self._tableDict = self.__generateTables()
    if mySQL is None:
        self.database = DB("ResourceManagementDB", "ResourceStatus/ResourceManagementDB", maxQueueSize)
    else:
        self.database = mySQL
def _update( self, cmd, conn=False ):
    """ Update MPIJob Database """
    # NOTE(review): stray debug print left in production code.
    print "DB4"
    start = Time.time()
    ret = DB._update( self, cmd, conn )
    if DEBUG:
        # Python 2 print statement: append "<elapsed> <sql>" (newlines
        # stripped) to the module-level debug file.
        print >> debugFile, Time.time() - start, cmd.replace('\n','')
        debugFile.flush()
    # NOTE(review): printing the raw result is also debug leftover.
    print ret
    return ret
def _query( self, cmd, conn=False ):
    """ Make queries to MPIJob DB """
    # NOTE(review): stray debug print left in production code.
    print "DB3"
    start = Time.time()
    ret = DB._query( self, cmd, conn )
    if DEBUG:
        # Python 2 print statement: append "<elapsed> <sql>" (newlines
        # stripped) to the module-level debug file.
        print >> debugFile, Time.time() - start, cmd.replace('\n','')
        debugFile.flush()
    # NOTE(review): printing the raw result is also debug leftover.
    print ret
    return ret