def submit_data_unit(self, data_unit_description):
    """ Creates a Data Unit object and initially imports the data specified
        in data_unit_description.
    """
    du = DataUnit(pilot_data=self,
                  data_unit_description=data_unit_description)
    self.data_unit_urls.append(du.get_url())
    du.add_pilot_data(self)
    return du
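# Minimal usage sketch (illustrative only): "pd" is assumed to be an
# already-initialized PilotData instance and the file URLs are placeholders,
# not part of the original source.
def stage_input_files(pd):
    data_unit_description = {
        "file_urls": ["ssh://localhost/tmp/input-1.dat",
                      "ssh://localhost/tmp/input-2.dat"]
    }
    # submit_data_unit() registers the DU with this Pilot Data and starts
    # the initial import of the listed files.
    du = pd.submit_data_unit(data_unit_description)
    du.wait()
    return du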
def __init__(self, pilot_data=None, data_unit_description=None, du_url=None):
    """ 1.) create a new Data Unit: data_unit_description required
        2.) reconnect to an existing Data Unit: du_url required
    """
    if du_url is None:
        self.id = self.DU_ID_PREFIX + str(uuid.uuid1())
        self.data_unit_description = data_unit_description
        self.pilot_data = []
        self.state = State.New
        self.data_unit_items = []
        if "file_urls" in self.data_unit_description:
            self.data_unit_items = DataUnitItem.create_data_unit_list(
                self, self.data_unit_description["file_urls"])

        self.url = None

        # register the data unit as a top-level entry in Redis
        # (application_id is expected to be defined at module level)
        application_url = CoordinationAdaptor.get_base_url(application_id)
        self.url = CoordinationAdaptor.add_du(application_url, self)
        CoordinationAdaptor.update_du(self)

        # Deprecated: the old method only allowed the creation of a DU if a
        # Pilot Data existed. Data units may now exist without being bound
        # to a resource.
        #if pilot_data is not None:
        #    self.url = CoordinationAdaptor.add_du(pilot_data.url, self)
        #    CoordinationAdaptor.update_du(self)
    else:
        self.id = DataUnit._get_du_id(du_url)
        self.url = du_url
        logger.debug("Restore du: %s" % self.id)
        self.__restore_state()

    self.transfer_threads = []
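# Illustrative sketch of the two construction paths described in the
# docstring above; "dud", the helper name, and the need for a running
# coordination backend (Redis) are assumptions, not part of the original
# source.
def _new_or_reconnected_du(dud=None, du_url=None):
    if du_url is None:
        # path 1: fresh DU, gets a new "du-<uuid>" id and is registered
        # with the coordination backend
        return DataUnit(data_unit_description=dud)
    # path 2: reconnect to an already registered DU and restore its state
    return DataUnit(du_url=du_url)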
def wait(self):
    """ Wait until the PD enters a final state (Done, Canceled or Failed). """
    while True:
        finish_counter = 0
        result_map = {}
        for du_url in self.data_units_urls:
            du = DataUnit(du_url=du_url)
            du.wait()
            state = du.get_state()
            if state not in result_map:
                result_map[state] = 1
            else:
                result_map[state] = result_map[state] + 1
            if self.__has_finished(state):
                finish_counter = finish_counter + 1
        logger.debug("PD ID: " + str(self.id) + " Total DUs: %s States: %s"
                     % (len(self.data_units_urls), str(result_map)))
        if finish_counter == len(self.data_units_urls):
            break
        time.sleep(2)
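# Illustrative helper mirroring the counting done in the loop above: builds
# a histogram of the current states of a list of DU URLs. The helper name
# and "du_urls" are assumptions for this sketch, not part of the original
# source; DataUnit(du_url=...) and get_state() are taken from the code above.
def du_state_histogram(du_urls):
    result_map = {}
    for du_url in du_urls:
        du = DataUnit(du_url=du_url)
        state = du.get_state()
        result_map[state] = result_map.get(state, 0) + 1
    return result_map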
@classmethod
def create_data_unit_from_dict(cls, du_dict):
    """ Restores a Data Unit from a dictionary of its attributes. """
    du = DataUnit()
    logger.debug("Restore DU: " + str(du_dict))
    for key, value in du_dict.items():
        setattr(du, key, value)
    return du
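# Minimal sketch of the same setattr-based restore pattern applied to a plain
# stub object; the stub class, helper name, and example dict are illustrative
# and not part of the original source. The real method applies this pattern
# to a DataUnit instance.
class _AttrStub(object):
    pass

def _restore_from_dict(obj_dict):
    obj = _AttrStub()
    for key, value in obj_dict.items():
        setattr(obj, key, value)
    return obj

# e.g. _restore_from_dict({"state": "New"}).state == "New"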
def test_du_reconnect():
    du_url = "redis://localhost/bigdata:du-1d1b7078-229f-11e2-834e-705681b3df0f"
    du = DataUnit(du_url=du_url)
    logger.debug(str(du.list()))
    du.export("/tmp/export-test")
def get_du(self, du_url):
    """ Returns the Data Unit if it is part of this Pilot Data, otherwise None. """
    if du_url in self.data_unit_urls:
        return DataUnit(du_url=du_url)
    return None
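# Hypothetical usage sketch: "pd" is assumed to be an existing PilotData
# instance and the helper name and target directory are placeholders.
# get_du() returns None when the DU is not part of this Pilot Data, so the
# result is checked before calling export().
def export_if_present(pd, du_url, target_dir="/tmp/export-test"):
    du = pd.get_du(du_url)
    if du is not None:
        du.export(target_dir)
        return True
    return False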