def setUp(self):
    """setup all necessary parameters"""
    # Fresh connection per test; released in tearDown.
    self.conn = self.dbi.connection()
    # Helpers first, DAO under test last — all share the same logger/dbi/owner.
    self.sequence_manager = SequenceManager(self.logger, self.dbi, self.dbowner)
    self.acquisition_era_insert = AcquisitionEraInsert(self.logger, self.dbi,
                                                       self.dbowner)
def setUp(self):
    """setup all necessary parameters"""
    self.conn = self.dbi.connection()
    # All DAOs take the identical constructor arguments.
    dao_args = (self.logger, self.dbi, self.dbowner)
    self.block_insert = BlockInsert(*dao_args)
    self.dataset_id = DatasetGetID(*dao_args)
    self.sequence_manager = SequenceManager(*dao_args)
def setUp(self):
    """setup all necessary parameters"""
    # Open the per-test database connection (closed again in tearDown).
    self.conn = self.dbi.connection()
    self.sequence_manager = SequenceManager(self.logger, self.dbi, self.dbowner)
    self.data_tier_insert = DataTierInsert(self.logger, self.dbi, self.dbowner)
def setUp(self):
    """setup all necessary parameters"""
    self.conn = self.dbi.connection()
    common = (self.logger, self.dbi, self.dbowner)
    # DAO under test plus the lookup/sequence helpers it needs.
    self.primary_dataset_insert = PrimaryDatasetInsert(*common)
    self.primary_ds_type = PrimaryDSTypeList(*common)
    self.sequence_manager = SequenceManager(*common)
def setUp(self):
    """setup all necessary parameters"""
    self.conn = self.dbi.connection()

    def _dao(cls):
        # Every DAO is constructed with the same three arguments.
        return cls(self.logger, self.dbi, self.dbowner)

    self.dataset_insert = _dao(DatasetInsert)
    self.processing_era_id = _dao(ProcessingEraID)
    self.acquisition_era_id = _dao(AcquisitionEraID)
    self.sequence_manager = _dao(SequenceManager)
class Insert_t(unittest.TestCase):
    """Unit test for the Oracle ProcessingEra Insert DAO."""

    @DaoConfig("DBSWriter")
    def __init__(self, methodName='runTest'):
        super(Insert_t, self).__init__(methodName)
        # Test fixture data is regenerated from the transient provider each run.
        data_location = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                     'test_data.pkl')
        self.data_provider = create_dbs_data_provider(data_type='transient',
                                                      data_location=data_location)
        self.data = self.data_provider.get_processing_era_data(regenerate=True)[0]

    def setUp(self):
        """setup all necessary parameters"""
        self.conn = self.dbi.connection()
        self.processing_era_insert = ProcessingEraInsert(self.logger, self.dbi,
                                                         self.dbowner)
        self.sequence_manager = SequenceManager(self.logger, self.dbi,
                                                self.dbowner)

    def tearDown(self):
        """Clean-up all necessary parameters"""
        self.conn.close()

    def test01(self):
        """dao.Oracle.ProcessingEra.Insert: Basic"""
        tran = self.conn.begin()
        try:
            # Allocate the primary key from the sequence inside the transaction.
            self.data["processing_era_id"] = self.sequence_manager.increment(
                self.conn, "SEQ_PE", tran)
            # NOTE(review): a no-op self-assignment of "processing_version"
            # was removed here; the value is already in self.data.
            self.processing_era_insert.execute(self.conn, self.data, tran)
        except Exception:
            tran.rollback()
            raise  # bare raise preserves the original traceback
        else:
            tran.commit()
        finally:
            if tran:
                tran.close()
class Insert_t(unittest.TestCase):
    """Unit test for the Oracle AcquisitionEra Insert DAO."""

    @DaoConfig("DBSWriter")
    def __init__(self, methodName='runTest'):
        super(Insert_t, self).__init__(methodName)
        here = os.path.dirname(os.path.abspath(__file__))
        data_location = os.path.join(here, 'test_data.pkl')
        self.data_provider = create_dbs_data_provider(data_type='transient',
                                                      data_location=data_location)
        self.data = self.data_provider.get_acquisition_era_data(regenerate=True)[0]

    def setUp(self):
        """setup all necessary parameters"""
        self.conn = self.dbi.connection()
        dao_args = (self.logger, self.dbi, self.dbowner)
        self.acquisition_era_insert = AcquisitionEraInsert(*dao_args)
        self.sequence_manager = SequenceManager(*dao_args)

    def tearDown(self):
        """Clean-up all necessary parameters"""
        self.conn.close()

    def test01(self):
        """dao.Oracle.AcquisitionEra.Insert: Basic"""
        tran = self.conn.begin()
        try:
            # Fetch a fresh id from the sequence, normalise the era name,
            # then run the insert — all inside one transaction.
            self.data["acquisition_era_id"] = self.sequence_manager.increment(
                self.conn, "SEQ_AQE", tran)
            self.data["acquisition_era_name"] = \
                self.data["acquisition_era_name"].upper()
            self.acquisition_era_insert.execute(self.conn, self.data, tran)
        except Exception as ex:
            tran.rollback()
            raise ex
        else:
            tran.commit()
        finally:
            if tran:
                tran.close()
def setUp(self):
    """setup all necessary parameters"""
    self.conn = self.dbi.connection()
    # Build every DAO helper with the same (logger, dbi, dbowner) triple.
    for attr, dao_cls in (("file_insert", FileInsert),
                          ("block_list", BlockList),
                          ("dataset_id", DatasetGetID),
                          ("file_type_id", FileTypeGetID),
                          ("sequence_manager", SequenceManager)):
        setattr(self, attr, dao_cls(self.logger, self.dbi, self.dbowner))
class Insert_t(unittest.TestCase):
    """Unit test for the Oracle Block Insert DAO (basic and child block)."""

    @DaoConfig("DBSWriter")
    def __init__(self, methodName='runTest'):
        super(Insert_t, self).__init__(methodName)
        data_location = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                     'test_data.pkl')
        self.data_provider = create_dbs_data_provider(data_type='transient',
                                                      data_location=data_location)
        self.data = self.data_provider.get_block_data(regenerate=True)[0]
        self.child_data = self.data_provider.get_child_block_data(
            regenerate=True)[0]

    def setUp(self):
        """setup all necessary parameters"""
        self.conn = self.dbi.connection()
        dao_args = (self.logger, self.dbi, self.dbowner)
        self.block_insert = BlockInsert(*dao_args)
        self.dataset_id = DatasetGetID(*dao_args)
        self.sequence_manager = SequenceManager(*dao_args)

    def tearDown(self):
        """Clean-up all necessary parameters"""
        self.conn.close()

    def _insertBlock(self, block):
        """Insert one block record inside a single transaction."""
        tran = self.conn.begin()
        try:
            # The insert DAO wants a dataset_id, not the dataset name.
            dataset_name = block["dataset"]
            block["dataset_id"] = self.dataset_id.execute(self.conn,
                                                          dataset_name, tran)
            del block["dataset"]
            block["block_id"] = self.sequence_manager.increment(self.conn,
                                                                "SEQ_BK", tran)
            self.block_insert.execute(self.conn, block, tran)
        except Exception as ex:
            tran.rollback()
            raise ex
        else:
            tran.commit()
        finally:
            if tran:
                tran.close()

    def test01(self):
        """dao.Oracle.Block.Insert: Basic"""
        self._insertBlock(self.data)

    def test02(self):
        """dao.Oracle.Block.Insert: ChildBlock"""
        self._insertBlock(self.child_data)
class Insert_t(unittest.TestCase):
    """Unit test for the Oracle Block Insert DAO."""

    @DaoConfig("DBSWriter")
    def __init__(self, methodName='runTest'):
        super(Insert_t, self).__init__(methodName)
        here = os.path.dirname(os.path.abspath(__file__))
        self.data_provider = create_dbs_data_provider(
            data_type='transient',
            data_location=os.path.join(here, 'test_data.pkl'))
        self.data = self.data_provider.get_block_data(regenerate=True)[0]
        self.child_data = self.data_provider.get_child_block_data(
            regenerate=True)[0]

    def setUp(self):
        """setup all necessary parameters"""
        self.conn = self.dbi.connection()
        self.block_insert = BlockInsert(self.logger, self.dbi, self.dbowner)
        self.dataset_id = DatasetGetID(self.logger, self.dbi, self.dbowner)
        self.sequence_manager = SequenceManager(self.logger, self.dbi,
                                                self.dbowner)

    def tearDown(self):
        """Clean-up all necessary parameters"""
        self.conn.close()

    def _insertBlock(self, record):
        """Resolve the dataset id, allocate a block id, and insert."""
        tran = self.conn.begin()
        try:
            ds_name = record["dataset"]
            # Swap the dataset name for the id the insert DAO expects.
            record["dataset_id"] = self.dataset_id.execute(self.conn, ds_name,
                                                           tran)
            del record["dataset"]
            record["block_id"] = self.sequence_manager.increment(self.conn,
                                                                 "SEQ_BK",
                                                                 tran)
            self.block_insert.execute(self.conn, record, tran)
        except Exception as ex:
            tran.rollback()
            raise ex
        else:
            tran.commit()
        finally:
            if tran:
                tran.close()

    def test01(self):
        """dao.Oracle.Block.Insert: Basic"""
        self._insertBlock(self.data)

    def test02(self):
        """dao.Oracle.Block.Insert: ChildBlock"""
        self._insertBlock(self.child_data)
class Insert_t(unittest.TestCase):
    """Unit test for the Oracle PrimaryDataset Insert DAO."""

    @DaoConfig("DBSWriter")
    def __init__(self, methodName="runTest"):
        super(Insert_t, self).__init__(methodName)
        data_location = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                     "test_data.pkl")
        self.data_provider = create_dbs_data_provider(data_type="transient",
                                                      data_location=data_location)
        self.data = self.data_provider.get_primary_dataset_data(regenerate=True)[0]

    def setUp(self):
        """setup all necessary parameters"""
        self.conn = self.dbi.connection()
        self.primary_dataset_insert = PrimaryDatasetInsert(self.logger, self.dbi,
                                                           self.dbowner)
        self.primary_ds_type = PrimaryDSTypeList(self.logger, self.dbi,
                                                 self.dbowner)
        self.sequence_manager = SequenceManager(self.logger, self.dbi,
                                                self.dbowner)

    def tearDown(self):
        """Clean-up all necessary parameters"""
        self.conn.close()

    def test01(self):
        """dao.Oracle.PrimaryDataset.Insert: Basic"""
        tran = self.conn.begin()
        try:
            # Resolve the ds type name to its id; the insert DAO wants the id.
            self.data["primary_ds_type_id"] = (
                self.primary_ds_type.execute(self.conn,
                                             self.data["primary_ds_type"],
                                             transaction=tran)
            )[0]["primary_ds_type_id"]
            del self.data["primary_ds_type"]
            self.data["primary_ds_id"] = self.sequence_manager.increment(
                self.conn, "SEQ_PDS", tran)
            # BUGFIX: the insert was previously executed without the
            # transaction, so a rollback could not undo it; pass tran so
            # the insert joins the transaction that is committed below.
            self.primary_dataset_insert.execute(self.conn, self.data, tran)
        except Exception:
            tran.rollback()
            raise  # bare raise preserves the original traceback
        else:
            tran.commit()
        finally:
            if tran:
                tran.close()
def setUp(self):
    """setup all necessary parameters"""
    # One connection per test, plus the three DAO helpers the tests use.
    self.conn = self.dbi.connection()
    self.sequence_manager = SequenceManager(self.logger, self.dbi, self.dbowner)
    self.primary_ds_type = PrimaryDSTypeList(self.logger, self.dbi, self.dbowner)
    self.primary_dataset_insert = PrimaryDatasetInsert(self.logger, self.dbi,
                                                       self.dbowner)
class Insert_t(unittest.TestCase):
    """Unit test for the Oracle File Insert DAO (basic and child file)."""

    @DaoConfig("DBSWriter")
    def __init__(self, methodName='runTest'):
        super(Insert_t, self).__init__(methodName)
        here = os.path.dirname(os.path.abspath(__file__))
        self.data_provider = create_dbs_data_provider(
            data_type='transient',
            data_location=os.path.join(here, 'test_data.pkl'))
        self.data = self.data_provider.get_file_data(regenerate=True)[0]
        self.child_data = self.data_provider.get_child_file_data(
            regenerate=True)[0]

    def setUp(self):
        """setup all necessary parameters"""
        self.conn = self.dbi.connection()
        dao_args = (self.logger, self.dbi, self.dbowner)
        self.file_insert = FileInsert(*dao_args)
        self.block_list = BlockList(*dao_args)
        self.dataset_id = DatasetGetID(*dao_args)
        self.file_type_id = FileTypeGetID(*dao_args)
        self.sequence_manager = SequenceManager(*dao_args)

    def tearDown(self):
        """Clean-up all necessary parameters"""
        self.conn.close()

    def _insertFile(self, record):
        """Translate names to ids and insert one file row transactionally."""
        tran = self.conn.begin()
        try:
            record["file_id"] = self.sequence_manager.increment(
                self.conn, "SEQ_FL", transaction=tran)
            #insert needs an id not the name, whereas list will return the name
            record["dataset_id"] = self.dataset_id.execute(
                self.conn, dataset=record["dataset"], transaction=tran)
            del record["dataset"]
            #insert needs an id not the name, whereas list will return the name
            block_info = self.block_list.execute(
                self.conn, block_name=record["block_name"], transaction=tran)
            record["block_id"] = block_info[0]["block_id"]
            del record["block_name"]
            #insert needs an id not the name, whereas list will return the name
            record["file_type_id"] = self.file_type_id.execute(
                self.conn, record["file_type"], transaction=tran)
            del record["file_type"]
            #No more supported, see Ticket #965 YG
            del record["creation_date"]
            del record["create_by"]
            self.file_insert.execute(self.conn, record, transaction=tran)
        except Exception as ex:
            tran.rollback()
            raise ex
        else:
            tran.commit()
        finally:
            if tran:
                tran.close()

    def test01(self):
        """dao.Oracle.File.Insert: Basic"""
        self._insertFile(self.data)

    def test02(self):
        """dao.Oracle.File.Insert: ChildFile"""
        self._insertFile(self.child_data)
class Insert_t(unittest.TestCase):
    """Unit test for the Oracle Dataset Insert DAO (basic and child)."""

    @DaoConfig("DBSWriter")
    def __init__(self, methodName='runTest'):
        super(Insert_t, self).__init__(methodName)
        here = os.path.dirname(os.path.abspath(__file__))
        self.data_provider = create_dbs_data_provider(
            data_type='transient',
            data_location=os.path.join(here, 'test_data.pkl'))
        self.data = self.data_provider.get_dataset_data(regenerate=True)[0]
        self.child_data = self.data_provider.get_child_dataset_data(
            regenerate=True)[0]

    def setUp(self):
        """setup all necessary parameters"""
        self.conn = self.dbi.connection()
        dao_args = (self.logger, self.dbi, self.dbowner)
        self.dataset_insert = DatasetInsert(*dao_args)
        self.processing_era_id = ProcessingEraID(*dao_args)
        self.acquisition_era_id = AcquisitionEraID(*dao_args)
        self.sequence_manager = SequenceManager(*dao_args)

    def tearDown(self):
        """Clean-up all necessary parameters"""
        self.conn.close()

    def _insertDataset(self, record):
        """Map lookup names to ids and insert one dataset transactionally."""
        tran = self.conn.begin()
        try:
            record["dataset_id"] = self.sequence_manager.increment(
                self.conn, "SEQ_DS", tran)
            record["data_tier_name"] = record["data_tier_name"].upper()
            record["dataset_access_type"] = \
                record["dataset_access_type"].upper()
            #insert needs an id not the name, whereas list will return the name
            record["physics_group_id"] = None
            del record["physics_group_name"]
            #insert needs an id not the name, whereas list will return the name
            record["processing_era_id"] = self.processing_era_id.execute(
                self.conn, record["processing_version"], tran)
            del record["processing_version"]
            #insert needs an id not the name, whereas list will return the name
            record["acquisition_era_id"] = self.acquisition_era_id.execute(
                self.conn, record["acquisition_era_name"], tran)
            del record["acquisition_era_name"]
            #not needed for the insert, but it is returned by list api
            del record["primary_ds_type"]
            self.dataset_insert.execute(self.conn, record, tran)
        except Exception as ex:
            tran.rollback()
            raise ex
        else:
            tran.commit()
        finally:
            if tran:
                tran.close()

    def test01(self):
        """dao.Oracle.Dataset.Insert: Basic"""
        self._insertDataset(self.data)

    def test02(self):
        """dao.Oracle.Dataset.Insert: ChildDataset"""
        self._insertDataset(self.child_data)
def setUp(self):
    """setup all necessary parameters"""
    self.conn = self.dbi.connection()
    shared = (self.logger, self.dbi, self.dbowner)
    # DAO under test plus the sequence helper used to mint ids.
    self.acquisition_era_insert = AcquisitionEraInsert(*shared)
    self.sequence_manager = SequenceManager(*shared)
class Insert_t(unittest.TestCase):
    """Unit test for the Oracle File Insert DAO."""

    @DaoConfig("DBSWriter")
    def __init__(self, methodName='runTest'):
        super(Insert_t, self).__init__(methodName)
        data_location = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                     'test_data.pkl')
        self.data_provider = create_dbs_data_provider(data_type='transient',
                                                      data_location=data_location)
        self.data = self.data_provider.get_file_data(regenerate=True)[0]
        self.child_data = self.data_provider.get_child_file_data(
            regenerate=True)[0]

    def setUp(self):
        """setup all necessary parameters"""
        self.conn = self.dbi.connection()
        shared = (self.logger, self.dbi, self.dbowner)
        self.file_insert = FileInsert(*shared)
        self.block_list = BlockList(*shared)
        self.dataset_id = DatasetGetID(*shared)
        self.file_type_id = FileTypeGetID(*shared)
        self.sequence_manager = SequenceManager(*shared)

    def tearDown(self):
        """Clean-up all necessary parameters"""
        self.conn.close()

    def _insertFile(self, record):
        """Resolve the name fields to ids and insert one file row."""
        tran = self.conn.begin()
        try:
            record["file_id"] = self.sequence_manager.increment(
                self.conn, "SEQ_FL", transaction=tran)
            #insert needs an id not the name, whereas list will return the name
            record["dataset_id"] = self.dataset_id.execute(
                self.conn, dataset=record["dataset"], transaction=tran)
            del record["dataset"]
            #insert needs an id not the name, whereas list will return the name
            block_info = self.block_list.execute(self.conn,
                                                 block_name=record["block_name"],
                                                 transaction=tran)
            # NOTE(review): if several blocks match, the last one wins —
            # preserved from the original loop; confirm this is intended.
            for entry in block_info:
                record["block_id"] = entry["block_id"]
            del record["block_name"]
            #insert needs an id not the name, whereas list will return the name
            record["file_type_id"] = self.file_type_id.execute(
                self.conn, record["file_type"], transaction=tran)
            del record["file_type"]
            #No more supported, see Ticket #965 YG
            del record["creation_date"]
            del record["create_by"]
            self.file_insert.execute(self.conn, record, transaction=tran)
        except Exception as ex:
            tran.rollback()
            raise ex
        else:
            tran.commit()
        finally:
            if tran:
                tran.close()

    def test01(self):
        """dao.Oracle.File.Insert: Basic"""
        self._insertFile(self.data)

    def test02(self):
        """dao.Oracle.File.Insert: ChildFile"""
        self._insertFile(self.child_data)
class Insert_t(unittest.TestCase):
    """Unit test for the Oracle Dataset Insert DAO."""

    @DaoConfig("DBSWriter")
    def __init__(self, methodName='runTest'):
        super(Insert_t, self).__init__(methodName)
        base_dir = os.path.dirname(os.path.abspath(__file__))
        self.data_provider = create_dbs_data_provider(
            data_type='transient',
            data_location=os.path.join(base_dir, 'test_data.pkl'))
        self.data = self.data_provider.get_dataset_data(regenerate=True)[0]
        self.child_data = self.data_provider.get_child_dataset_data(
            regenerate=True)[0]

    def setUp(self):
        """setup all necessary parameters"""
        self.conn = self.dbi.connection()
        self.dataset_insert = DatasetInsert(self.logger, self.dbi, self.dbowner)
        self.processing_era_id = ProcessingEraID(self.logger, self.dbi,
                                                 self.dbowner)
        self.acquisition_era_id = AcquisitionEraID(self.logger, self.dbi,
                                                   self.dbowner)
        self.sequence_manager = SequenceManager(self.logger, self.dbi,
                                                self.dbowner)

    def tearDown(self):
        """Clean-up all necessary parameters"""
        self.conn.close()

    def _insertDataset(self, ds):
        """Insert one dataset row, resolving name fields to ids first."""
        tran = self.conn.begin()
        try:
            ds["dataset_id"] = self.sequence_manager.increment(self.conn,
                                                               "SEQ_DS", tran)
            ds["data_tier_name"] = ds["data_tier_name"].upper()
            ds["dataset_access_type"] = ds["dataset_access_type"].upper()
            #insert needs an id not the name, whereas list will return the name
            ds["physics_group_id"] = None
            del ds["physics_group_name"]
            #insert needs an id not the name, whereas list will return the name
            ds["processing_era_id"] = self.processing_era_id.execute(
                self.conn, ds["processing_version"], tran)
            del ds["processing_version"]
            #insert needs an id not the name, whereas list will return the name
            ds["acquisition_era_id"] = self.acquisition_era_id.execute(
                self.conn, ds["acquisition_era_name"], tran)
            del ds["acquisition_era_name"]
            #not needed for the insert, but it is returned by list api
            del ds["primary_ds_type"]
            self.dataset_insert.execute(self.conn, ds, tran)
        except Exception as ex:
            tran.rollback()
            raise ex
        else:
            tran.commit()
        finally:
            if tran:
                tran.close()

    def test01(self):
        """dao.Oracle.Dataset.Insert: Basic"""
        self._insertDataset(self.data)

    def test02(self):
        """dao.Oracle.Dataset.Insert: ChildDataset"""
        self._insertDataset(self.child_data)