def addUnit(self, outname, dsname, template=None):
    """Create a new unit for this transform from a dataset name and output name.

    Args:
        outname: name to give the new unit.
        dsname: dataset name assigned to the unit's inputdata.
        template: optional inputdata object to clone via stripProxy; when
            falsy a fresh DQ2Dataset is used instead.
    """
    new_unit = AtlasUnit()
    # use the supplied template if given, otherwise start from a blank DQ2Dataset
    new_unit.inputdata = stripProxy(template) if template else DQ2Dataset()
    new_unit.inputdata.dataset = dsname
    new_unit.name = outname
    self.addUnitToTRF(new_unit)
def addUnit(self, outname, dsname, template=None):
    """Create a new unit based on the given dataset name and output name.

    Args:
        outname: name for the created unit.
        dsname: dataset name stored on the unit's inputdata.
        template: optional inputdata template; passed through stripProxy
            when provided, otherwise a new DQ2Dataset is created.
    """
    unit = AtlasUnit()
    if template:
        # strip any GPI proxy from the caller-supplied template
        unit.inputdata = stripProxy(template)
    else:
        unit.inputdata = DQ2Dataset()
    unit.name = outname
    unit.inputdata.dataset = dsname
    self.addUnitToTRF(unit)
def createUnits(self):
    """Create new units if required given the inputdata.

    Chains to the parent implementation first, then ensures every entry in
    self.inputdata is covered by at least one unit, creating units as needed.
    DQ2Dataset entries are matched by dataset name; ATLASLocalDataset entries
    are matched by file-name sets and optionally split into chunks of
    files_per_unit files.
    """
    # call parent for chaining
    super(AtlasTransform, self).createUnits()

    # if there is no input data, just create a single unit given the application
    if len(self.inputdata) == 0 and len(self.units) == 0:
        unit = AtlasUnit()
        unit.name = "Unit %d" % len(self.units)
        self.addUnitToTRF(unit)

    # loop over input data and see if we need to create any more units
    for inds in self.inputdata:
        ok = True

        if inds._name == "DQ2Dataset":
            # check if this data is already being run over
            # NOTE(review): assumes every existing unit has a non-None
            # inputdata with a .dataset attribute — confirm for units
            # created without input data
            ok = False
            for unit in self.units:
                if unit.inputdata.dataset == inds.dataset:
                    ok = True

            if not ok:
                # new unit required for this dataset
                unit = AtlasUnit()
                unit.name = "Unit %d" % len(self.units)
                self.addUnitToTRF(unit)
                unit.inputdata = inds

        elif inds._name == "ATLASLocalDataset":
            # different behaviour depending on files_per_unit
            if self.files_per_unit < 0:
                # one unit per dataset: check if this file set is already covered
                ok = False
                for unit in self.units:
                    if set(unit.inputdata.names) == set(inds.names):
                        ok = True

                if not ok:
                    # new unit required for this dataset
                    unit = AtlasUnit()
                    unit.name = "Unit %d" % len(self.units)
                    self.addUnitToTRF(unit)
                    unit.inputdata = inds
            else:
                # gather every file already assigned to a unit
                curr_data = []
                for unit in self.units:
                    curr_data.extend(unit.inputdata.names)

                # BUG FIX: the original tested `set(inds.names) in set(curr_data)`,
                # which asks whether the whole set is an *element* of a set of
                # filename strings — always False — so partially-covered datasets
                # were only detected when exactly equal. issubset() expresses the
                # intended containment check (and covers the equality case).
                ok = set(inds.names).issubset(set(curr_data))

                if not ok:
                    # new unit(s) required for this dataset, chunked by files_per_unit
                    num = 0
                    while num < len(inds.names):
                        unit = AtlasUnit()
                        unit.name = "Unit %d" % len(self.units)
                        self.addUnitToTRF(unit)
                        unit.inputdata = inds.clone()
                        unit.inputdata.names = inds.names[num:num + self.files_per_unit]
                        num += self.files_per_unit
def createUnits(self):
    """Create new units if required given the inputdata.

    Calls the parent implementation for chaining, then walks self.inputdata
    creating any missing units: DQ2Dataset entries are deduplicated by
    dataset name, ATLASLocalDataset entries by file-name set, with optional
    splitting into files_per_unit-sized chunks.
    """
    # call parent for chaining
    super(AtlasTransform, self).createUnits()

    # if there is no input data, just create a single unit given the application
    if len(self.inputdata) == 0 and len(self.units) == 0:
        unit = AtlasUnit()
        unit.name = "Unit %d" % len(self.units)
        self.addUnitToTRF(unit)

    # loop over input data and see if we need to create any more units
    for inds in self.inputdata:
        ok = True

        if inds._name == "DQ2Dataset":
            # check if this data is already being run over
            # NOTE(review): presumes unit.inputdata is always set here —
            # verify for units created without input data
            ok = False
            for unit in self.units:
                if unit.inputdata.dataset == inds.dataset:
                    ok = True

            if not ok:
                # new unit required for this dataset
                unit = AtlasUnit()
                unit.name = "Unit %d" % len(self.units)
                self.addUnitToTRF(unit)
                unit.inputdata = inds

        elif inds._name == "ATLASLocalDataset":
            # different behaviour depending on files_per_unit
            if self.files_per_unit < 0:
                # whole dataset per unit: already covered if an identical file set exists
                ok = False
                for unit in self.units:
                    if set(unit.inputdata.names) == set(inds.names):
                        ok = True

                if not ok:
                    # new unit required for this dataset
                    unit = AtlasUnit()
                    unit.name = "Unit %d" % len(self.units)
                    self.addUnitToTRF(unit)
                    unit.inputdata = inds
            else:
                # collect all files already assigned to existing units
                curr_data = []
                for unit in self.units:
                    curr_data.extend(unit.inputdata.names)

                # BUG FIX: `set(inds.names) in set(curr_data)` checks whether the
                # set itself is a member of a set of filename strings (always
                # False), so coverage was only ever detected on exact equality.
                # The intended containment test is issubset(), which also
                # subsumes the equality comparison.
                ok = set(inds.names).issubset(set(curr_data))

                if not ok:
                    # new unit(s) required for this dataset, split into chunks
                    num = 0
                    while num < len(inds.names):
                        unit = AtlasUnit()
                        unit.name = "Unit %d" % len(self.units)
                        self.addUnitToTRF(unit)
                        unit.inputdata = inds.clone()
                        unit.inputdata.names = inds.names[num:num + self.files_per_unit]
                        num += self.files_per_unit