def __init__(self, db, schema):
    """args:
        db     - db handle
        schema - schema name where cobaltDB tables reside (str)
    """
    self.events_DAO = db2util.dao(db, schema, "JOB_EVENTS")
    self.event_classes_DAO = db2util.dao(db, schema, "JOB_EVENT_CLASSES")

    # Populated in __configure_events():
    self.__by_class = {}         # { event_class : [ event_IDs_for_class ] }
    self.__by_ID = None          # { event_ID : event_name }
    self.__by_name = None        # { event_name : event_ID }
    self.__hold_table = None     # { hold_ID : False } - see note in __configure_events()
    self.__release_table = None  # { hold_ID : release_ID_for_hold_ID }

    self.__configure_events()    # Populate foregoing

    # List of all hold and release IDs
    self.__hold_release_IDs = self.__release_table.keys() + self.__release_table.values()
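# Illustrative sketch with hypothetical event IDs: __release_table maps each
# hold event ID to the release event ID that clears it, so concatenating its
# keys and values yields every hold- and release-related ID in one list.
# Note that dict.keys() + dict.values() relies on Python 2, where both
# return plain lists.
_example_release_table = {10: 11, 12: 13}   # { hold_ID : release_ID }
_example_hold_release_IDs = (_example_release_table.keys()
                             + _example_release_table.values())
# e.g. [10, 12, 11, 13] (key order is not guaranteed)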
def __init__(self, dbName, username, password, schema):
    self.db = db2util.db()
    try:
        self.db.connect(dbName, username, password)
    except:
        logger.error("Failed to open a connection to database %s as user %s"
                     % (dbName, username))
        raise

    self.schema = schema
    table_names = ['RESERVATION_DATA', 'RESERVATION_PARTS',
                   'RESERVATION_EVENTS', 'RESERVATION_USERS',
                   'RESERVATION_PROG', 'JOB_DATA', 'JOB_ATTR', 'JOB_DEPS',
                   'JOB_EVENTS', 'JOB_COBALT_STATES', 'JOB_PROG',
                   'JOB_RUN_USERS']
    no_pk_tables = ['RESERVATION_PARTS', 'RESERVATION_USERS', 'JOB_ATTR',
                    'JOB_RUN_USERS']

    # Handle tables.  There is probably a better way to do this.
    self.daos = {}
    try:
        for table_name in table_names:
            logger.info("Accessing table: %s" % table_name)
            if table_name in ['RESERVATION_EVENTS', 'JOB_EVENTS',
                              'JOB_COBALT_STATES']:
                self.daos[table_name] = StateTableData(self.db, schema,
                                                       table_name)
            elif table_name == 'RESERVATION_DATA':
                self.daos[table_name] = ResDataData(self.db, schema,
                                                    table_name)
            elif table_name == 'JOB_DATA':
                self.daos[table_name] = JobDataData(self.db, schema,
                                                    table_name)
            elif table_name == 'JOB_DEPS':
                self.daos[table_name] = JobDepsData(self.db, schema,
                                                    table_name)
            elif table_name == 'JOB_PROG':
                self.daos[table_name] = JobProgData(self.db, schema,
                                                    table_name)
            elif table_name in no_pk_tables:
                self.daos[table_name] = no_pk_dao(self.db, schema,
                                                  table_name)
            else:
                self.daos[table_name] = db2util.dao(self.db, schema,
                                                    table_name)
    except:
        logger.error("Error accessing table %s!" % table_name)
        self.db.close()
        raise

    # We opened with a schema; make that the default for now.
    self.db.prepExec("set current schema %s" % schema)
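# A minimal sketch (not the module's actual code) of the same table-to-DAO
# dispatch done with a lookup table instead of the if/elif chain above.
# Class names are represented as strings so the snippet runs on its own,
# without db2util or the DAO subclasses being importable.
_DAO_CLASS_BY_TABLE = {
    'RESERVATION_EVENTS': 'StateTableData',
    'JOB_EVENTS': 'StateTableData',
    'JOB_COBALT_STATES': 'StateTableData',
    'RESERVATION_DATA': 'ResDataData',
    'JOB_DATA': 'JobDataData',
    'JOB_DEPS': 'JobDepsData',
    'JOB_PROG': 'JobProgData',
}

def _dao_class_for(table_name, no_pk_tables):
    # Specific DAO subclass first, then no_pk_dao for tables without a
    # primary key, then the generic db2util.dao for everything else.
    if table_name in _DAO_CLASS_BY_TABLE:
        return _DAO_CLASS_BY_TABLE[table_name]
    if table_name in no_pk_tables:
        return 'no_pk_dao'
    return 'db2util.dao'

# e.g. _dao_class_for('JOB_ATTR', ['JOB_ATTR', 'JOB_RUN_USERS']) -> 'no_pk_dao'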
def __init__(self, db, schema):
    """args:
        db     - db handle
        schema - schema name where cobaltDB tables reside (str)
    """
    self.events_DAO = db2util.dao(db, schema, "RESERVATION_EVENTS")

    # Populated in __configure_events():
    self.__by_ID = None    # { event_ID : event_name }
    self.__by_name = None  # { event_name : event_ID }

    self.__configure_events()  # Populate foregoing
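# Hedged sketch of the two lookup tables noted above.  The event IDs and
# names here are hypothetical, and exactly how __configure_events() builds
# the maps from RESERVATION_EVENTS rows is assumed rather than taken from
# the source; the point is only that __by_name is the inverse of __by_ID.
_example_by_ID = {1: 'creating', 2: 'modifying', 3: 'ending'}
_example_by_name = dict((name, event_id)
                        for event_id, name in _example_by_ID.items())
assert _example_by_name['ending'] == 3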
class IncrID(object):
    """Generator for incrementing integer IDs.

    At most one instance of IncrID should use DB generation, as only a
    single ID pool is presently supported."""

    def __getstate__(self):
        d = dict(self.__dict__)
        # Can't pickle DB objects
        try:
            del d['db']
            del d['id_DAO']
        except KeyError:
            # We might not have these two attributes; ignore on failure.
            # Not checking use_database, as that may not exist in certain
            # conditions early in execution.
            pass
        return d

    def __setstate__(self, d):
        self.__dict__.update(d)
        # Downrev state files won't have use_database in the pickle.
        # Current usage for IncrID doesn't actually use the entire
        # pickled object; IncrID is instantiated fresh each run.
        if not hasattr(self, 'use_database'):
            # Unfortunately we have no way of knowing in this scope
            # whether db ID generation is desirable, so assume not.
            self.use_database = False
            self.db = None
            self.id_DAO = None
            self.hostname = None
            self.resource_name = None
        if self.use_database:
            self.__db_startup()

    def __init__(self, use_database=False):
        """Initialize a new IncrID."""
        self.idnum = 0
        self.use_database = use_database
        self.db = None
        self.id_DAO = None
        self.hostname = None
        self.resource_name = None
        if self.use_database:
            self.__db_startup()

    def __db_startup(self):
        Cobalt.Util.init_cobalt_config()
        if 'db2util' not in sys.modules:
            global db2util
            import db2util
        self.hostname = socket.getfqdn()
        self.resource_name = Cobalt.Util.get_config_option(DB_SECTION,
                                                           "resource_name")
        self.db = db2util.db()
        try:
            # No defaults here; if the param doesn't exist we may as well
            # bomb out.
            self.db.connect(
                Cobalt.Util.get_config_option(DB_SECTION, "database"),
                Cobalt.Util.get_config_option(DB_SECTION, "user"),
                Cobalt.Util.get_config_option(DB_SECTION, "pwd"))
        except db2util.dbError, err:
            # We wanted to log here, but no logging available (yet)
            raise
        self.id_DAO = db2util.dao(self.db, DB_COMMON_SCHEMA, "JOB_ID")
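# Hedged usage sketch: pickling an IncrID drops the unpicklable DB handles
# via __getstate__; __setstate__ only backfills defaults for downrev state
# files that predate use_database, so a freshly restored object simply
# lacks the db/id_DAO attributes.  Run with use_database=False so no DB2
# connection is attempted.
import pickle

generator = IncrID(use_database=False)
blob = pickle.dumps(generator)
restored = pickle.loads(blob)
assert restored.idnum == 0
assert restored.use_database is False
assert not hasattr(restored, 'db')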