def checkOutputContainers(self):
    """Go through all transforms and check all datasets are registered.

    First empties the overall Task container (best effort), then asks each
    transform to re-check/re-register its own output containers.
    """
    logger.info("Cleaning out overall Task container...")

    # Acquire before the try so the finally-release can never run on an
    # unacquired lock.
    dq2_lock.acquire()
    try:
        try:
            dslist = dq2.listDatasetsInContainer(self.getContainerName())
        except Exception:
            # Best effort: an unreadable container is treated as empty.
            dslist = []
        try:
            dq2.deleteDatasetsFromContainer(self.getContainerName(), dslist)
        except DQContainerDoesNotHaveDataset:
            pass
    # DQException must be tested before the generic Exception handler;
    # in the original order the DQ2-specific branch was unreachable.
    except DQException as x:
        logger.error('DQ2 Problem cleaning out Task container: %s %s' %
                     (x.__class__, x))
    except Exception as x:
        logger.error("Problem cleaning out Task container: %s %s",
                     x.__class__, x)
    finally:
        dq2_lock.release()

    logger.info("Checking output data has been registered. This can take a few minutes...")
    for trf in self.transforms:
        logger.info("Checking containers in Tranform %d..." % trf.getID())
        trf.checkOutputContainers()
def checkOutputContainers(self):
    """Go through all transforms and check all datasets are registered.

    Empties the overall Task container (best effort) and then delegates to
    each transform's own checkOutputContainers().
    """
    logger.info("Cleaning out overall Task container...")

    # Take the lock before entering the try block: if acquire() raised
    # inside the try, the finally clause would release an unheld lock.
    dq2_lock.acquire()
    try:
        try:
            dslist = dq2.listDatasetsInContainer(self.getContainerName())
        except Exception:
            # Best effort: treat an unreadable container as empty.
            dslist = []
        try:
            dq2.deleteDatasetsFromContainer(self.getContainerName(), dslist)
        except DQContainerDoesNotHaveDataset:
            pass
    # Specific handler first: with the original Exception-before-DQException
    # ordering the DQ2-specific branch could never fire.
    except DQException as x:
        logger.error('DQ2 Problem cleaning out Task container: %s %s' %
                     (x.__class__, x))
    except Exception as x:
        logger.error("Problem cleaning out Task container: %s %s",
                     x.__class__, x)
    finally:
        dq2_lock.release()

    logger.info(
        "Checking output data has been registered. This can take a few minutes...")
    for trf in self.transforms:
        logger.info("Checking containers in Tranform %d..." % trf.getID())
        trf.checkOutputContainers()
def checkOutputContainers(self):
    """Go through all completed units and make sure datasets are registered as required.

    Empties this transform's container (best effort), then re-registers the
    dataset of every completed unit that produced DQ2 output.
    """
    logger.info("Cleaning out transform %d container..." % self.getID())

    # Acquire before the try so the finally-release can never run on an
    # unacquired lock.
    dq2_lock.acquire()
    try:
        try:
            dslist = dq2.listDatasetsInContainer(self.getContainerName())
        except Exception:
            # Best effort: an unreadable container is treated as empty.
            dslist = []
        try:
            dq2.deleteDatasetsFromContainer(self.getContainerName(), dslist)
        except DQContainerDoesNotHaveDataset:
            pass
    # DQException must precede the generic Exception handler; in the
    # original order the DQ2-specific branch was unreachable.
    except DQException as x:
        logger.error('DQ2 Problem cleaning out Transform container: %s %s' %
                     (x.__class__, x))
    except Exception as x:
        logger.error("Problem cleaning out Transform container: %s %s",
                     x.__class__, x)
    finally:
        dq2_lock.release()

    logger.info("Checking output data has been registered for Transform %d..." % self.getID())
    for unit in self.units:
        if len(unit.active_job_ids) == 0:
            continue
        # Hoist the repeated GPI.jobs() lookup out of the condition.
        job = GPI.jobs(unit.active_job_ids[0])
        if (unit.status == "completed" and job.outputdata
                and job.outputdata._impl._name == "DQ2OutputDataset"):
            logger.info("Checking containers in Unit %d..." % unit.getID())
            unit.registerDataset()
def checkOutputContainers(self):
    """Go through all completed units and make sure datasets are registered as required.

    Best-effort cleanout of this transform's container, followed by a
    registerDataset() call on each completed unit with DQ2 output.
    """
    logger.info("Cleaning out transform %d container..." % self.getID())

    # Take the lock before entering the try block: if acquire() raised
    # inside the try, the finally clause would release an unheld lock.
    dq2_lock.acquire()
    try:
        try:
            dslist = dq2.listDatasetsInContainer(self.getContainerName())
        except Exception:
            # Best effort: treat an unreadable container as empty.
            dslist = []
        try:
            dq2.deleteDatasetsFromContainer(self.getContainerName(), dslist)
        except DQContainerDoesNotHaveDataset:
            pass
    # Specific handler first: with Exception listed before DQException the
    # DQ2-specific branch could never fire.
    except DQException as x:
        logger.error(
            'DQ2 Problem cleaning out Transform container: %s %s' %
            (x.__class__, x))
    except Exception as x:
        logger.error("Problem cleaning out Transform container: %s %s",
                     x.__class__, x)
    finally:
        dq2_lock.release()

    logger.info(
        "Checking output data has been registered for Transform %d..." %
        self.getID())
    for unit in self.units:
        if len(unit.active_job_ids) == 0:
            continue
        # Single GPI.jobs() lookup instead of two identical ones.
        job = GPI.jobs(unit.active_job_ids[0])
        if (unit.status == "completed" and job.outputdata
                and job.outputdata._impl._name == "DQ2OutputDataset"):
            logger.info("Checking containers in Unit %d..." % unit.getID())
            unit.registerDataset()
def unregisterDataset(self):
    """Remove this unit's output datasets from the transform and task containers.

    Returns:
        bool: True if all removals succeeded (or there was nothing to do),
        False if any removal failed.
    """
    trf = self._getParent()
    # Stop early if the transform-container cleanup failed, matching the
    # original short-circuit behaviour.
    if not self._removeOutputFromContainer(trf.getContainerName()):
        return False

    task = trf._getParent()
    return self._removeOutputFromContainer(task.getContainerName())

def _removeOutputFromContainer(self, container_name):
    """Best-effort removal of this unit's output datasets from container_name.

    Returns True on success, False if any dataset could not be removed.
    """
    fail = False
    # Acquire before the try so the finally-release can never run on an
    # unacquired lock.
    dq2_lock.acquire()
    try:
        try:
            containerinfo = dq2.listDatasets(container_name)
        except Exception:
            # Best effort: an unreadable container is treated as empty.
            containerinfo = {}
        if containerinfo != {}:
            for ds in self.getOutputDatasetList():
                try:
                    dq2.deleteDatasetsFromContainer(container_name, [ds])
                except DQContainerDoesNotHaveDataset:
                    pass
                # DQException before Exception, otherwise the DQ2-specific
                # branch is unreachable. The original logged an undefined
                # name `j` (NameError); log the dataset being removed.
                except DQException as x:
                    logger.error(
                        'DQ2 Problem removing dataset %s from container %s: %s %s' %
                        (ds, container_name, x.__class__, x))
                    fail = True
                except Exception as x:
                    logger.error(
                        'Problem removing dataset %s from container %s: %s %s' %
                        (ds, container_name, x.__class__, x))
                    fail = True
    finally:
        dq2_lock.release()
    return not fail
def unregisterDataset(self):
    """Remove this unit's output datasets from the transform and task containers.

    Returns:
        bool: True if every removal succeeded, False otherwise.
    """
    trf = self._getParent()
    # Preserve the original short-circuit: do not touch the task container
    # when the transform-container cleanup already failed.
    if not self._deleteOutputDatasetsFrom(trf.getContainerName()):
        return False

    task = trf._getParent()
    return self._deleteOutputDatasetsFrom(task.getContainerName())

def _deleteOutputDatasetsFrom(self, container_name):
    """Delete this unit's output datasets from container_name (best effort).

    Returns True on success, False if any deletion failed.
    """
    ok = True
    # Lock taken before the try block so finally never releases an
    # unheld lock.
    dq2_lock.acquire()
    try:
        try:
            containerinfo = dq2.listDatasets(container_name)
        except Exception:
            # Best effort: treat an unreadable container as empty.
            containerinfo = {}
        if containerinfo != {}:
            for ds in self.getOutputDatasetList():
                try:
                    dq2.deleteDatasetsFromContainer(container_name, [ds])
                except DQContainerDoesNotHaveDataset:
                    pass
                # Specific DQException handler must come first; the original
                # order made it dead code. The original also referenced an
                # undefined name `j` here (NameError on this path) — log the
                # dataset actually being removed instead.
                except DQException as x:
                    logger.error(
                        'DQ2 Problem removing dataset %s from container %s: %s %s' %
                        (ds, container_name, x.__class__, x))
                    ok = False
                except Exception as x:
                    logger.error(
                        'Problem removing dataset %s from container %s: %s %s' %
                        (ds, container_name, x.__class__, x))
                    ok = False
    finally:
        dq2_lock.release()
    return ok