def submit_data_unit(self, data_unit_description):
    """ Creates a DataUnit object and initially imports the data specified in data_unit_description. """
    du = DataUnit(pilot_data=self,
                  data_unit_description=data_unit_description)
    self.data_unit_urls.append(du.get_url())
    du.add_pilot_data(self)
    return du
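
# Illustrative usage sketch (not part of the class): submit a single Data Unit
# to an existing PilotData instance and block until its data has been imported.
# The "file_urls" key and the local path are assumptions made for this example.
def example_submit_data_unit(pilot_data):
    data_unit_description = {"file_urls": ["/tmp/input.txt"]}
    du = pilot_data.submit_data_unit(data_unit_description)
    du.wait()   # DataUnit.wait() blocks until the DU reaches a final state
    return du
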
def wait(self):
    """ Wait until the Pilot Data enters a final state (Done, Canceled or Failed). """
    while True:
        finish_counter = 0
        result_map = {}
        for du_url in self.data_unit_urls:
            du = DataUnit(du_url=du_url)
            du.wait()
            state = du.get_state()
            # count the number of Data Units per state
            if state not in result_map:
                result_map[state] = 1
            else:
                result_map[state] += 1
            if self.__has_finished(state):
                finish_counter += 1
        logger.debug("PD ID: %s Total DUs: %s States: %s" %
                     (self.id, len(self.data_unit_urls), str(result_map)))
        if finish_counter == len(self.data_unit_urls):
            break
        time.sleep(2)
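
# Illustrative sketch: submit several Data Units and block on the Pilot Data
# until every DU has reached a final state. The description layout and the
# input paths are assumptions made for this example.
def example_wait_for_all(pilot_data, input_files):
    for path in input_files:
        pilot_data.submit_data_unit({"file_urls": [path]})
    pilot_data.wait()   # polls all DUs every 2 seconds (Done, Canceled or Failed)
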
def test_du_reconnect():
    """ Reconnects to an existing DataUnit via its URL, lists its files and exports them. """
    du_url = "redis://localhost/bigdata:du-1d1b7078-229f-11e2-834e-705681b3df0f"
    du = DataUnit(du_url=du_url)
    logger.debug(str(du.list()))
    du.export("/tmp/export-test")
def get_du(self, du_url):
    """ Returns the DataUnit if it is part of this Pilot Data, otherwise None. """
    if du_url in self.data_unit_urls:
        du = DataUnit(du_url=du_url)
        return du
    return None
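
# Illustrative sketch: look up a Data Unit on a Pilot Data by its URL and export
# its files only if the DU is actually managed by this Pilot Data. The target
# directory is an assumption made for this example.
def example_export_if_managed(pilot_data, du_url):
    du = pilot_data.get_du(du_url)
    if du is None:
        return False
    du.export("/tmp/export-test")
    return True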