def testPolicyCopy(self):
    """Verify that a deep Policy copy stays valid after the source Policy is released."""
    paf = os.path.join(proddir, "examples", "EventTransmitter_policy.paf")
    original = Policy.createPolicy(paf)
    duplicate = Policy(original, True)  # deep copy of the source policy

    # both the source and the copy must expose the same value
    self.assertEqual(original.get("transmitter.serializationFormat"), "deluxe")
    self.assertEqual(duplicate.getString("transmitter.serializationFormat"), "deluxe")

    # dropping the source must not invalidate the deep copy
    original = None
    self.assertEqual(duplicate.getString("transmitter.serializationFormat"), "deluxe")
def testPolicyCopy(self):
    """Verify that a deep Policy copy stays valid after the source Policy is released."""
    p = Policy.createPolicy("examples/EventTransmitter_policy.paf")
    pp = Policy(p, True)  # deep copy
    # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual
    self.assertEqual(p.get("transmitter.serializationFormat"), "deluxe")
    self.assertEqual(pp.getString("transmitter.serializationFormat"), "deluxe")
    # releasing the source must not invalidate the deep copy
    p = None
    self.assertEqual(pp.getString("transmitter.serializationFormat"), "deluxe")
class _DataReadyComp(object):
    """Mixin providing "data ready" event reporting for a pipeline stage.

    NOTE(review): relies on the host class to supply getRun(), getName(),
    getEventBrokerHost(), and self.log -- confirm against the concrete stage.
    """

    def setup(self, policyDict="DataReady_dict.paf"):
        # Merge this stage's policy (if any) with the package's default
        # dictionary so every key has a value.
        deffile = DefaultPolicyFile("ctrl_sched", policyDict, "policies")
        defpol = Policy.createPolicy(deffile, deffile.getRepositoryPath())
        if not hasattr(self,"policy") or not self.policy:
            self.policy = Policy()
        self.policy.mergeDefaults(defpol.getDictionary())
        # self.mode = self.policy.getString("mode")
        # if self.mode not in "parallel serial":
        #     raise RuntimeError("Stage %s: Unsupported mode: %s" %
        #                        (self.getName(), self.mode))

        # clipboard keys naming the dataset lists this stage reads
        self.clipboardKeys = {}
        self.clipboardKeys["completedDatasets"] = \
            self.policy.getString("inputKeys.completedDatasets")
        self.clipboardKeys["possibleDatasets"] = \
            self.policy.getString("inputKeys.possibleDatasets")

        # one DataReadyClient per configured "datasets" sub-policy
        self.dataclients = []
        clpols = []
        if self.policy.exists("datasets"):
            clpols = self.policy.getPolicyArray("datasets")
        for pol in clpols:
            dstype = None
            if pol.exists("datasetType"):
                dstype = pol.getString("datasetType")
            topic = pol.getString("dataReadyEvent")
            reportAll = pol.getBool("reportAllPossible")
            client = DataReadyClient(self.getRun(), self.getName(), topic,
                                     self.getEventBrokerHost(), dstype,
                                     reportAll)
            self.dataclients.append(client)

    def tellDataReady(self, clipboard):
        """
        send an event reporting on the output datasets that have been
        attempted by this pipeline.
        @param clipboard the pipeline clipboard containing the output datasets
        """
        completed = clipboard.get(self.clipboardKeys["completedDatasets"])
        possible = clipboard.get(self.clipboardKeys["possibleDatasets"])

        for client in self.dataclients:
            if not possible:
                break
            self.log.log(Log.DEBUG, "completed: " + str(completed))
            # each client reports its share and returns the still-unreported rest
            possible = client.tellDataReady(possible, completed)

        # update the possible list for the ones we have not reported
        # on yet.
        clipboard.put(self.clipboardKeys["possibleDatasets"], possible)
class _DataReadyComp(object):
    """Mixin providing "data ready" event reporting for a pipeline stage.

    NOTE(review): relies on the host class to supply getRun(), getName(),
    getEventBrokerHost(), and self.log -- confirm against the concrete stage.
    """

    def setup(self, policyDict="DataReady_dict.paf"):
        # Merge this stage's policy (if any) with the package's default
        # dictionary so every key has a value.
        deffile = DefaultPolicyFile("ctrl_sched", policyDict, "policies")
        defpol = Policy.createPolicy(deffile, deffile.getRepositoryPath())
        if not hasattr(self, "policy") or not self.policy:
            self.policy = Policy()
        self.policy.mergeDefaults(defpol.getDictionary())
        # self.mode = self.policy.getString("mode")
        # if self.mode not in "parallel serial":
        #     raise RuntimeError("Stage %s: Unsupported mode: %s" %
        #                        (self.getName(), self.mode))

        # clipboard keys naming the dataset lists this stage reads
        self.clipboardKeys = {}
        self.clipboardKeys["completedDatasets"] = \
            self.policy.getString("inputKeys.completedDatasets")
        self.clipboardKeys["possibleDatasets"] = \
            self.policy.getString("inputKeys.possibleDatasets")

        # one DataReadyClient per configured "datasets" sub-policy
        self.dataclients = []
        clpols = []
        if self.policy.exists("datasets"):
            clpols = self.policy.getPolicyArray("datasets")
        for pol in clpols:
            dstype = None
            if pol.exists("datasetType"):
                dstype = pol.getString("datasetType")
            topic = pol.getString("dataReadyEvent")
            reportAll = pol.getBool("reportAllPossible")
            client = DataReadyClient(self.getRun(), self.getName(), topic,
                                     self.getEventBrokerHost(), dstype,
                                     reportAll)
            self.dataclients.append(client)

    def tellDataReady(self, clipboard):
        """
        send an event reporting on the output datasets that have been
        attempted by this pipeline.
        @param clipboard the pipeline clipboard containing the output datasets
        """
        completed = clipboard.get(self.clipboardKeys["completedDatasets"])
        possible = clipboard.get(self.clipboardKeys["possibleDatasets"])

        for client in self.dataclients:
            if not possible:
                break
            self.log.log(Log.DEBUG, "completed: " + str(completed))
            # each client reports its share and returns the still-unreported rest
            possible = client.tellDataReady(possible, completed)

        # update the possible list for the ones we have not reported
        # on yet.
        clipboard.put(self.clipboardKeys["possibleDatasets"], possible)
def testMergeDefaults(self):
    """Exercise Policy.mergeDefaults with dictionaries, dictionary-like
    policies, and post-merge validation.
    """
    # from a non-trivial dictionary
    p = Policy(self.getTestDictionary("defaults_policy_partial.paf"))
    p.set("required", "foo")
    d = Dictionary(self.getTestDictionary("defaults_dictionary_good.paf"))
    d.loadPolicyFiles(self.getTestDictionary(), True)
    # assert_ is a deprecated unittest alias (removed in Python 3.12);
    # assertEqual/assertTrue also give better failure messages
    self.assertEqual(p.nameCount(), 2)
    p.mergeDefaults(d)
    self.assertEqual(p.valueCount("int_range_count"), 3)
    self.assertEqual(p.nameCount(), 7)

    # from a policy that's really a dictionary
    p = Policy()
    pd = Policy(self.getTestDictionary("defaults_dictionary_indirect.paf"))
    p.mergeDefaults(pd)
    self.assertEqual(p.getString("string_type"), "foo")
    self.assertTrue(p.getDictionary().isDictionary())

    # from a policy that's really a non-trivial dictionary
    p = Policy(self.getTestDictionary("defaults_policy_partial.paf"))
    p.set("required", "foo")
    pd = Policy(self.getTestDictionary("defaults_dictionary_policy.paf"))
    pd.loadPolicyFiles(self.getTestDictionary(), True)
    self.assertEqual(p.nameCount(), 2)
    p.mergeDefaults(pd)
    self.assertEqual(p.valueCount("int_range_count"), 3)
    self.assertEqual(p.nameCount(), 5)

    # ensure post-load validation
    p.set("int_range_count", -5)
    self.assertValidationError(ValidationError.UNKNOWN_NAME,
                               p.add, "unknown", 0)

    # test throwing validation
    p = Policy(self.getTestDictionary("defaults_policy_partial.paf"))
    try:
        p.mergeDefaults(pd)
    # "except ValidationError, e" is Python 2-only syntax (SyntaxError on
    # Python 3); "as" works on both 2.6+ and 3.x
    except ValidationError as e:
        self.assertEqual(e.args[0].getErrors("required"),
                         ValidationError.MISSING_REQUIRED)
def testMergeDefaults(self):
    """Exercise Policy.mergeDefaults: merging from a Dictionary and from
    dictionary-like Policies, throwing vs. collecting validation, dictionary
    retention, deep merges, and Dictionary propagation between Policies.
    """
    # from a non-trivial dictionary
    p = Policy(self.getTestDictionary("defaults_policy_partial.paf"))
    p.set("required", "foo")
    d = Dictionary(self.getTestDictionary("defaults_dictionary_good.paf"))
    d.loadPolicyFiles(self.getTestDictionary(), True)
    self.assertEqual(p.nameCount(), 2)
    p.mergeDefaults(d)
    self.assertEqual(p.valueCount("int_range_count"), 3)
    self.assertEqual(p.nameCount(), 7)

    # from a policy that's really a dictionary
    p = Policy()
    pd = Policy(self.getTestDictionary("defaults_dictionary_indirect.paf"))
    p.mergeDefaults(pd)
    self.assertEqual(p.getString("string_type"), "foo")
    self.assertTrue(p.getDictionary().isDictionary())

    # from a policy that's really a non-trivial dictionary
    p = Policy(self.getTestDictionary("defaults_policy_partial.paf"))
    p.set("required", "foo")
    pd = Policy(self.getTestDictionary("defaults_dictionary_policy.paf"))
    pd.loadPolicyFiles(self.getTestDictionary(), True)
    self.assertEqual(p.nameCount(), 2)
    p.mergeDefaults(pd)
    self.assertEqual(p.valueCount("int_range_count"), 3)
    self.assertEqual(p.nameCount(), 5)

    # ensure post-load validation
    p.set("int_range_count", -5)
    self.assertValidationError(ValidationError.UNKNOWN_NAME,
                               p.add, "unknown", 0)

    # test throwing validation
    p = Policy(self.getTestDictionary("defaults_policy_partial.paf"))
    try:
        p.mergeDefaults(pd)
    except ValidationError as ve:
        self.assertEqual(ve.getErrors("required"),
                         ValidationError.MISSING_REQUIRED)

    # non-throwing validation: errors accumulate in ve instead of raising
    p = Policy(self.getTestDictionary("defaults_policy_partial.paf"))
    ve = ValidationError("Dictionary_1.py", 1, "testMergeDefaults")
    p.mergeDefaults(pd, False, ve.cpp)
    self.assertEqual(ve.getErrors("required"),
                     ValidationError.MISSING_REQUIRED)
    self.assertEqual(ve.getParamCount(), 1)

    # non-retention: passing False apparently drops the dictionary after the
    # merge -- NOTE(review): parameter semantics inferred from usage; confirm
    p = Policy(self.getTestDictionary("defaults_policy_partial.paf"))
    p.set("required", "foo")
    p.mergeDefaults(pd, False)
    # make sure validate() fails gracefully when no dictionary present
    self.assertRaiseLCE(DictionaryError, "No dictionary",
                        p.validate, "No dictionary assigned")
    p.add("unknown", 0)  # would be rejected if dictionary was kept

    # deep merge from a Policy that's not a Dictionary
    p = Policy(self.getTestDictionary("defaults_policy_partial.paf"))
    p.mergeDefaults(Policy(self.getTestDictionary("defaults_policy_most.paf")))
    self.assertEqual(p.nameCount(), 3)
    self.assertIs(p.getBool("bool_set_count"), True)
    self.assertEqual(p.getString("indirect.string_type"), "bar")

    # propagation of a Dictionary from one Policy to another via mergeDefaults
    d = Dictionary(self.getTestDictionary("defaults_dictionary_complete.paf"))
    d.loadPolicyFiles(self.getTestDictionary())
    pEmpty = Policy()
    pEmpty.mergeDefaults(d)
    self.assertTrue(pEmpty.canValidate())
    pPartial = Policy(self.getTestDictionary("defaults_policy_partial.paf"))
    pPartial.mergeDefaults(pEmpty)
    self.assertTrue(pPartial.canValidate(),
                    "Dictionary handed off via mergeDefaults.")
def testNested(self):
    """Validate sub-dictionary handling: malformed sub-dictionaries, loading
    of dictionaryFile references, error collection, and multi-level nesting.
    """
    self.assertRaiseLCE(DictionaryError,
                        "policy_bad_subdef.dictionary is a string",
                        Dictionary, "Malformed subdictionary",
                        self.getTestDictionary("nested_dictionary_bad_1.paf"))

    p = Policy(self.getTestDictionary("nested_policy_good.paf"))
    self.assertRaiseLCE(DictionaryError, "Unknown Dictionary property",
                        Dictionary, "Malformed subdictionary",
                        self.getTestDictionary("nested_dictionary_bad_2.paf"))

    d = Dictionary(self.getTestDictionary("nested_dictionary_good.paf"))
    d.check()
    # validating before the referenced dictionaryFile is loaded must raise
    self.assertRaiseLCE(lsst.pex.exceptions.LogicError,
                        "dictionaryFile needs to be loaded",
                        d.validate, "dictionaryFile not loaded", p)
    self.assertFalse(d.hasSubDictionary("policy_1"))
    self.assertTrue(d.hasSubDictionary("policy_2"))
    self.assertFalse(d.hasSubDictionary("policy_load"))
    n = d.loadPolicyFiles(self.getTestDictionary(), True)
    self.assertTrue(d.hasSubDictionary("policy_load"))
    self.assertEqual(n, 1)  # number of files loaded
    d.validate(p)

    # collect (rather than throw) the validation errors from a bad policy
    ve = ValidationError("Dictionary_1.py", 1, "testNested")
    p = Policy(self.getTestDictionary("nested_policy_bad.paf"))
    d.validate(p, ve.cpp)
    self.assertEqual(ve.getErrors("policy_1"), ValidationError.WRONG_TYPE)
    self.assertEqual(ve.getErrors("policy_2.foo"),
                     ValidationError.VALUE_DISALLOWED)
    self.assertEqual(ve.getErrors("policy_2.bar"),
                     ValidationError.MISSING_REQUIRED)
    self.assertEqual(ve.getErrors("policy_3.baz.qux"),
                     ValidationError.WRONG_TYPE)
    self.assertEqual(ve.getErrors("policy_3.baz.paisley"),
                     ValidationError.MISSING_REQUIRED)
    self.assertEqual(ve.getErrors("policy_3.baz.paisley"),
                     ValidationError.MISSING_REQUIRED)
    self.assertEqual(ve.getErrors("policy_load.height"),
                     ValidationError.MISSING_REQUIRED)
    self.assertEqual(ve.getParamCount(), 6)

    # multiple nesting
    p = Policy(self.getTestDictionary("nested_policy_1.paf"))
    n = p.loadPolicyFiles(self.getTestDictionary())
    self.assertEqual(n, 3)
    self.assertEqual(p.getString("1.2b.foo"), "bar")

    d = Dictionary(self.getTestDictionary("nested_dictionary_1.paf"))
    n = d.loadPolicyFiles(self.getTestDictionary())
    self.assertEqual(n, 3)
    p = Policy(True, d)  # load from defaults
    self.assertEqual(p.getString("1.2a.foo"), "bar")
    self.assertEqual(p.getString("1.2b.foo"), "bar")

    # error in child
    d = Dictionary(self.getTestDictionary("nested_dictionary_bad_child.paf"))
    d.loadPolicyFiles(self.getTestDictionary())
    # this should really be caught during loadPolicyFiles(), above
    self.assertRaiseLCE(DictionaryError, "Unknown type: \"NotAType\"",
                        d.makeDef("sub.something").getType,
                        "Loaded sub-dictionary specified a bogus type")
class _GetAJobComp(object):
    """Mixin implementing the "get a job" protocol against a JobOffice.

    NOTE(review): this is Python 2-only code (iteritems, has_key, long and
    the -1L literal will not parse on Python 3).  Also assumes the host class
    supplies getRun(), getName(), getEventBrokerHost(), and self.log.
    """

    def setup(self):
        # merge this stage's policy with the package's default dictionary
        deffile = DefaultPolicyFile("ctrl_sched", "GetAJob_dict.paf", "policies")
        defpol = Policy.createPolicy(deffile, deffile.getRepositoryPath())
        if not hasattr(self, "policy") or not self.policy:
            self.policy = Policy()
        self.policy.mergeDefaults(defpol.getDictionary())
        self.jobid = None
        self.tagLogger(None)
        # self.mode = self.policy.getString("mode")
        # if self.mode not in "parallel serial":
        #     raise RuntimeError("Stage %s: Unsupported mode: %s" %
        #                        (self.getName(), self.mode))

        # clipboard keys under which the assignment pieces will be stored
        self.clipboardKeys = {}
        self.clipboardKeys["jobIdentity"] = \
            self.policy.getString("outputKeys.jobIdentity")
        self.clipboardKeys["inputDatasets"] = \
            self.policy.getString("outputKeys.inputDatasets")
        self.clipboardKeys["outputDatasets"] = \
            self.policy.getString("outputKeys.outputDatasets")
        self.clipboardKeys["completedDatasets"] = \
            self.policy.getString("outputKeys.completedDatasets")
        self.log.log(Log.INFO - 1, "clipboard keys: " + str(self.clipboardKeys))

        topic = self.policy.getString("pipelineEvent")
        self.client = GetAJobClient(self.getRun(), self.getName(), topic,
                                    self.getEventBrokerHost(), self.log)
        self.log.log(Log.INFO - 1,
                     "Using OriginatorId = %d" % self.client.getOriginatorId())

    def setAssignment(self, clipboard):
        """
        Request a job assignment from the JobOffice and post its pieces to
        the clipboard.
        @param clipboard the pipeline clipboard to receive the assignment
        """
        clipboard.put("originatorId", self.client.getOriginatorId())
        self.client.tellReady()
        self.log.log(Log.INFO - 2, "Told JobOffice, I'm ready!")
        jobid, inputs, outputs = self.client.getAssignment()
        if jobid is None:
            raise RuntimeError(
                "empty assignment from JobOffice (event timed out?)")
        self.log.log(
            Log.INFO - 2,
            "Received assignment for pipeline #" +
            str(clipboard.get("originatorId")))

        # ids is a dictionary
        # an all-zero identity is used as the "no more datasets" sentinel
        allZero = 1
        for inputKey, inputValue in inputs[0].ids.iteritems():
            # self.log.log(Log.INFO, "key " + str(inputKey) + " value-" + str(inputValue) + "----")
            if str(inputValue) != "0":
                allZero = 0
        if allZero == 1:
            self.log.log(
                Log.INFO,
                "All of the attributes are zero, denoting noMoreDatasets ")
            # NOTE(review): assumed to belong inside the all-zero branch
            # (source formatting was ambiguous) -- confirm
            clipboard.put("noMoreDatasets", allZero)

        clipboard.put(self.clipboardKeys["inputDatasets"], inputs)
        clipboard.put(self.clipboardKeys["outputDatasets"], outputs)
        clipboard.put(self.clipboardKeys["completedDatasets"], [])
        clipboard.put(self.clipboardKeys["jobIdentity"], jobid)
        self.tagLogger(jobid.copy())
        self.log.log(Log.INFO, "Processing job: " + self.jobidStr)

    def tagLogger(self, jobid):
        """
        Attach the job identity to the logger preamble.
        @param jobid a dict of identity key/values, or None to clear the
                     previously set identity
        """
        idstr = []
        if not jobid:
            # clear out the previous info
            if self.jobid:
                for key in self.jobid.keys():
                    self._resetLogJobId(self.jobid, key)
            else:
                self.jobid = {}
            jobid = self.jobid
            idstr.append("unknown")
        else:
            self.jobid = jobid
            for key in self.jobid.keys():
                idstr.append("%s=%s" % (key, str(jobid[key])))

        # this does not work as intended (i.e. properties do not get into the
        # intended loggers). Until this is made possible by pex_logging, we will
        # just add these properties to our own local logger.
        #
        # root = Log.getDefaultLog()
        # root = self.log

        self.jobidStr = " ".join(idstr)
        self.log.setPreamblePropertyString("JobId", self.jobidStr)
        # NOTE(review): "root" is never defined (its assignment is commented
        # out above), so the following line raises NameError when reached --
        # confirm whether the root.* calls should be removed or root bound to
        # self.log
        root.setPreamblePropertyString("JobId", self.jobidStr)
        for key in jobid.keys():
            self._setLogJobIdValue(self.log, jobid, key)
            self._setLogJobIdValue(root, jobid, key)

    def _resetLogJobId(self, jobid, key):
        # Reset one identity value to a type-appropriate "unset" marker.
        # NOTE(review): bool is a subclass of int in Python, so True/False hit
        # the int branch first and the bool branch is unreachable
        if jobid.has_key(key):
            if isinstance(jobid[key], int):
                jobid[key] = -1
            elif isinstance(jobid[key], long):
                jobid[key] = -1L
            elif isinstance(jobid[key], float):
                jobid[key] = 0.0
            elif isinstance(jobid[key], bool):
                jobid[key] = False
            else:
                jobid[key] = ""

    def _setLogJobIdValue(self, log, jobid, key):
        # Record one identity value using the type-specific preamble setter.
        # NOTE(review): as above, the bool branch is shadowed by the int branch
        if jobid.has_key(key):
            name = "JobId_" + key
            if isinstance(jobid[key], int):
                log.setPreamblePropertyInt(name, jobid[key])
            elif isinstance(jobid[key], long):
                log.setPreamblePropertyLong(name, jobid[key])
            elif isinstance(jobid[key], float):
                log.setPreamblePropertyDouble(name, jobid[key])
            elif isinstance(jobid[key], bool):
                log.setPreamblePropertyBool(name, jobid[key])
            else:
                log.setPreamblePropertyString(name, jobid[key])
def testMergeDefaults(self):
    """Exercise Policy.mergeDefaults: merging from a Dictionary and from
    dictionary-like Policies, throwing vs. collecting validation, dictionary
    retention, deep merges, and Dictionary propagation between Policies.
    """
    # from a non-trivial dictionary
    p = Policy(self.getTestDictionary("defaults_policy_partial.paf"))
    p.set("required", "foo")
    d = Dictionary(self.getTestDictionary("defaults_dictionary_good.paf"))
    d.loadPolicyFiles(self.getTestDictionary(), True)
    # assert_ is a deprecated unittest alias (removed in Python 3.12);
    # assertEqual/assertTrue also give better failure messages
    self.assertEqual(p.nameCount(), 2)
    p.mergeDefaults(d)
    self.assertEqual(p.valueCount("int_range_count"), 3)
    self.assertEqual(p.nameCount(), 7)

    # from a policy that's really a dictionary
    p = Policy()
    pd = Policy(self.getTestDictionary("defaults_dictionary_indirect.paf"))
    p.mergeDefaults(pd)
    self.assertEqual(p.getString("string_type"), "foo")
    self.assertTrue(p.getDictionary().isDictionary())

    # from a policy that's really a non-trivial dictionary
    p = Policy(self.getTestDictionary("defaults_policy_partial.paf"))
    p.set("required", "foo")
    pd = Policy(self.getTestDictionary("defaults_dictionary_policy.paf"))
    pd.loadPolicyFiles(self.getTestDictionary(), True)
    self.assertEqual(p.nameCount(), 2)
    p.mergeDefaults(pd)
    self.assertEqual(p.valueCount("int_range_count"), 3)
    self.assertEqual(p.nameCount(), 5)

    # ensure post-load validation
    p.set("int_range_count", -5)
    self.assertValidationError(ValidationError.UNKNOWN_NAME,
                               p.add, "unknown", 0)

    # test throwing validation
    p = Policy(self.getTestDictionary("defaults_policy_partial.paf"))
    try:
        p.mergeDefaults(pd)
    except ValidationError as ve:
        self.assertEqual(ve.getErrors("required"),
                         ValidationError.MISSING_REQUIRED)

    # non-throwing validation: errors accumulate in ve instead of raising
    p = Policy(self.getTestDictionary("defaults_policy_partial.paf"))
    ve = ValidationError("Dictionary_1.py", 1, "testMergeDefaults")
    p.mergeDefaults(pd, False, ve.cpp)
    self.assertEqual(ve.getErrors("required"),
                     ValidationError.MISSING_REQUIRED)
    self.assertEqual(ve.getParamCount(), 1)

    # non-retention
    p = Policy(self.getTestDictionary("defaults_policy_partial.paf"))
    p.set("required", "foo")
    p.mergeDefaults(pd, False)
    # make sure validate() fails gracefully when no dictionary present
    self.assertRaiseLCE(DictionaryError, "No dictionary",
                        p.validate, "No dictionary assigned")
    p.add("unknown", 0)  # would be rejected if dictionary was kept

    # deep merge from a Policy that's not a Dictionary
    p = Policy(self.getTestDictionary("defaults_policy_partial.paf"))
    p.mergeDefaults(Policy(self.getTestDictionary("defaults_policy_most.paf")))
    self.assertEqual(p.nameCount(), 3)
    self.assertIs(p.getBool("bool_set_count"), True)
    self.assertEqual(p.getString("indirect.string_type"), "bar")

    # propagation of a Dictionary from one Policy to another via mergeDefaults
    d = Dictionary(self.getTestDictionary("defaults_dictionary_complete.paf"))
    d.loadPolicyFiles(self.getTestDictionary())
    pEmpty = Policy()
    pEmpty.mergeDefaults(d)
    self.assertTrue(pEmpty.canValidate())
    pPartial = Policy(self.getTestDictionary("defaults_policy_partial.paf"))
    pPartial.mergeDefaults(pEmpty)
    self.assertTrue(pPartial.canValidate(),
                    "Dictionary handed off via mergeDefaults.")
def testNested(self):
    """Validate sub-dictionary handling: malformed sub-dictionaries, loading
    of dictionaryFile references, error collection, and multi-level nesting.
    """
    self.assertRaiseLCE(DictionaryError,
                        "policy_bad_subdef.dictionary is a string",
                        Dictionary, "Malformed subdictionary",
                        self.getTestDictionary("nested_dictionary_bad_1.paf"))

    p = Policy(self.getTestDictionary("nested_policy_good.paf"))
    self.assertRaiseLCE(DictionaryError, "Unknown Dictionary property",
                        Dictionary, "Malformed subdictionary",
                        self.getTestDictionary("nested_dictionary_bad_2.paf"))

    d = Dictionary(self.getTestDictionary("nested_dictionary_good.paf"))
    d.check()
    # validating before the referenced dictionaryFile is loaded must raise
    self.assertRaiseLCE(lsst.pex.exceptions.LogicError,
                        "dictionaryFile needs to be loaded",
                        d.validate, "dictionaryFile not loaded", p)
    # assert_ is a deprecated unittest alias (removed in Python 3.12); use
    # assertTrue/assertFalse/assertEqual for clarity and better messages
    self.assertFalse(d.hasSubDictionary("policy_1"))
    self.assertTrue(d.hasSubDictionary("policy_2"))
    self.assertFalse(d.hasSubDictionary("policy_load"))
    n = d.loadPolicyFiles(self.getTestDictionary(), True)
    self.assertTrue(d.hasSubDictionary("policy_load"))
    self.assertEqual(n, 1)  # number of files loaded
    d.validate(p)

    # collect (rather than throw) the validation errors from a bad policy
    ve = ValidationError("Dictionary_1.py", 1, "testNested")
    p = Policy(self.getTestDictionary("nested_policy_bad.paf"))
    d.validate(p, ve.cpp)
    self.assertEqual(ve.getErrors("policy_1"), ValidationError.WRONG_TYPE)
    self.assertEqual(ve.getErrors("policy_2.foo"),
                     ValidationError.VALUE_DISALLOWED)
    self.assertEqual(ve.getErrors("policy_2.bar"),
                     ValidationError.MISSING_REQUIRED)
    self.assertEqual(ve.getErrors("policy_3.baz.qux"),
                     ValidationError.WRONG_TYPE)
    self.assertEqual(ve.getErrors("policy_3.baz.paisley"),
                     ValidationError.MISSING_REQUIRED)
    self.assertEqual(ve.getErrors("policy_3.baz.paisley"),
                     ValidationError.MISSING_REQUIRED)
    self.assertEqual(ve.getErrors("policy_load.height"),
                     ValidationError.MISSING_REQUIRED)
    self.assertEqual(ve.getParamCount(), 6)

    # multiple nesting
    p = Policy(self.getTestDictionary("nested_policy_1.paf"))
    n = p.loadPolicyFiles(self.getTestDictionary())
    self.assertEqual(n, 3)
    self.assertEqual(p.getString("1.2b.foo"), "bar")

    d = Dictionary(self.getTestDictionary("nested_dictionary_1.paf"))
    n = d.loadPolicyFiles(self.getTestDictionary())
    self.assertEqual(n, 3)
    p = Policy(True, d)  # load from defaults
    self.assertEqual(p.getString("1.2a.foo"), "bar")
    self.assertEqual(p.getString("1.2b.foo"), "bar")

    # error in child
    d = Dictionary(self.getTestDictionary("nested_dictionary_bad_child.paf"))
    d.loadPolicyFiles(self.getTestDictionary())
    # this should really be caught during loadPolicyFiles(), above
    self.assertRaiseLCE(DictionaryError, "Unknown type: \"NotAType\"",
                        d.makeDef("sub.something").getType,
                        "Loaded sub-dictionary specified a bogus type")
# non-retention p = Policy(self.getTestDictionary("defaults_policy_partial.paf")) p.set("required", "foo") p.mergeDefaults(pd, False) # make sure validate() fails gracefully when no dictionary present self.assertRaisesEx(DictionaryError, "No dictionary", p.validate, "No dictionary assigned") p.add("unknown", 0) # would be rejected if dictionary was kept # deep merge from a Policy that's not a Dictionary p = Policy(self.getTestDictionary("defaults_policy_partial.paf")) p.mergeDefaults(Policy(self.getTestDictionary("defaults_policy_most.paf"))) self.assert_(p.nameCount() == 3) self.assert_(p.getBool("bool_set_count") == True) self.assert_(p.getString("indirect.string_type") == "bar") # propagation of a Dictionary from one Policy to another via mergeDefaults d = Dictionary(self.getTestDictionary("defaults_dictionary_complete.paf")) d.loadPolicyFiles(self.getTestDictionary()) pEmpty = Policy() pEmpty.mergeDefaults(d) self.assert_(pEmpty.canValidate()) pPartial = Policy(self.getTestDictionary("defaults_policy_partial.paf")) pPartial.mergeDefaults(pEmpty) self.assert_(pPartial.canValidate(), "Dictionary handed off via mergeDefaults.") # test the sample code at http://dev.lsstcorp.org/trac/wiki/PolicyHowto def testSampleCode(self): policyFile = DefaultPolicyFile("pex_policy", "defaults_dictionary_complete.paf", "tests/dictionary")
class _GetAJobComp(object):
    """Mixin implementing the "get a job" protocol against a JobOffice.

    NOTE(review): this is Python 2-only code (iteritems, has_key, long and
    the -1L literal will not parse on Python 3).  Also assumes the host class
    supplies getRun(), getName(), getEventBrokerHost(), and self.log.
    """

    def setup(self):
        # merge this stage's policy with the package's default dictionary
        deffile = DefaultPolicyFile("ctrl_sched","GetAJob_dict.paf","policies")
        defpol = Policy.createPolicy(deffile, deffile.getRepositoryPath())
        if not hasattr(self,"policy") or not self.policy:
            self.policy = Policy()
        self.policy.mergeDefaults(defpol.getDictionary())
        self.jobid = None
        self.tagLogger(None)
        # self.mode = self.policy.getString("mode")
        # if self.mode not in "parallel serial":
        #     raise RuntimeError("Stage %s: Unsupported mode: %s" %
        #                        (self.getName(), self.mode))

        # clipboard keys under which the assignment pieces will be stored
        self.clipboardKeys = {}
        self.clipboardKeys["jobIdentity"] = \
            self.policy.getString("outputKeys.jobIdentity")
        self.clipboardKeys["inputDatasets"] = \
            self.policy.getString("outputKeys.inputDatasets")
        self.clipboardKeys["outputDatasets"] = \
            self.policy.getString("outputKeys.outputDatasets")
        self.clipboardKeys["completedDatasets"] = \
            self.policy.getString("outputKeys.completedDatasets")
        self.log.log(Log.INFO-1, "clipboard keys: " + str(self.clipboardKeys))

        topic = self.policy.getString("pipelineEvent")
        self.client = GetAJobClient(self.getRun(), self.getName(), topic,
                                    self.getEventBrokerHost(), self.log)
        self.log.log(Log.INFO-1,
                     "Using OriginatorId = %d" % self.client.getOriginatorId())

    def setAssignment(self, clipboard):
        """
        Request a job assignment from the JobOffice and post its pieces to
        the clipboard.
        @param clipboard the pipeline clipboard to receive the assignment
        """
        clipboard.put("originatorId", self.client.getOriginatorId())
        self.client.tellReady()
        self.log.log(Log.INFO-2, "Told JobOffice, I'm ready!")
        jobid, inputs, outputs = self.client.getAssignment()
        if jobid is None:
            raise RuntimeError("empty assignment from JobOffice (event timed out?)")
        self.log.log(Log.INFO-2, "Received assignment for pipeline #" +
                     str(clipboard.get("originatorId")))

        # ids is a dictionary
        # an all-zero identity is used as the "no more datasets" sentinel
        allZero = 1
        for inputKey, inputValue in inputs[0].ids.iteritems():
            # self.log.log(Log.INFO, "key " + str(inputKey) + " value-" + str(inputValue) + "----")
            if str(inputValue) != "0":
                allZero = 0
        if allZero == 1:
            self.log.log(Log.INFO,
                         "All of the attributes are zero, denoting noMoreDatasets ")
            # NOTE(review): assumed to belong inside the all-zero branch
            # (source formatting was ambiguous) -- confirm
            clipboard.put("noMoreDatasets", allZero)

        clipboard.put(self.clipboardKeys["inputDatasets"], inputs)
        clipboard.put(self.clipboardKeys["outputDatasets"], outputs)
        clipboard.put(self.clipboardKeys["completedDatasets"], [])
        clipboard.put(self.clipboardKeys["jobIdentity"], jobid)
        self.tagLogger(jobid.copy())
        self.log.log(Log.INFO, "Processing job: " + self.jobidStr)

    def tagLogger(self, jobid):
        """
        Attach the job identity to the logger preamble.
        @param jobid a dict of identity key/values, or None to clear the
                     previously set identity
        """
        idstr = []
        if not jobid:
            # clear out the previous info
            if self.jobid:
                for key in self.jobid.keys():
                    self._resetLogJobId(self.jobid, key)
            else:
                self.jobid = {}
            jobid = self.jobid
            idstr.append("unknown")
        else:
            self.jobid = jobid
            for key in self.jobid.keys():
                idstr.append("%s=%s" % (key, str(jobid[key])))

        # this does not work as intended (i.e. properties do not get into the
        # intended loggers). Until this is made possible by pex_logging, we will
        # just add these properties to our own local logger.
        #
        # root = Log.getDefaultLog()
        # root = self.log

        self.jobidStr = " ".join(idstr)
        self.log.setPreamblePropertyString("JobId", self.jobidStr)
        # NOTE(review): "root" is never defined (its assignment is commented
        # out above), so the following line raises NameError when reached --
        # confirm whether the root.* calls should be removed or root bound to
        # self.log
        root.setPreamblePropertyString("JobId", self.jobidStr)
        for key in jobid.keys():
            self._setLogJobIdValue(self.log, jobid, key)
            self._setLogJobIdValue(root, jobid, key)

    def _resetLogJobId(self, jobid, key):
        # Reset one identity value to a type-appropriate "unset" marker.
        # NOTE(review): bool is a subclass of int in Python, so True/False hit
        # the int branch first and the bool branch is unreachable
        if jobid.has_key(key):
            if isinstance(jobid[key], int):
                jobid[key] = -1
            elif isinstance(jobid[key], long):
                jobid[key] = -1L
            elif isinstance(jobid[key], float):
                jobid[key] = 0.0
            elif isinstance(jobid[key], bool):
                jobid[key] = False
            else:
                jobid[key] = ""

    def _setLogJobIdValue(self, log, jobid, key):
        # Record one identity value using the type-specific preamble setter.
        # NOTE(review): as above, the bool branch is shadowed by the int branch
        if jobid.has_key(key):
            name = "JobId_" + key
            if isinstance(jobid[key], int):
                log.setPreamblePropertyInt(name, jobid[key])
            elif isinstance(jobid[key], long):
                log.setPreamblePropertyLong(name, jobid[key])
            elif isinstance(jobid[key], float):
                log.setPreamblePropertyDouble(name, jobid[key])
            elif isinstance(jobid[key], bool):
                log.setPreamblePropertyBool(name, jobid[key])
            else:
                log.setPreamblePropertyString(name, jobid[key])