def testMakeJobCommandEvent(self):
    ds = self.testDatasetFromProperty()
    dss = [ds]
    for i in xrange(1, 5):
        ds = copy.deepcopy(ds)
        ds.ids["ampid"] += 1
        dss.append(ds)
    ods = Dataset("PostISR-CCD", visit=ds.ids["visitid"], ccd=ds.ids["ccdid"])
    job = JobItem.createItem(ods, "ccdassembly", dss, ods)

    jev = self.joboffice.makeJobCommandEvent(job, 9993252, "testing")
    self.assertEquals(jev.getStatus(), "job:assign")
    self.assertEquals(jev.getRunId(), "testing")
    self.assertEquals(jev.getDestinationId(), 9993252)
    self.assert_(jev.getPropertySet().exists("inputs"))
    self.assert_(jev.getPropertySet().exists("outputs"))

    dodss = jev.getPropertySet().getArrayString("inputs")
    self.assertEquals(len(dodss), 5)

    i = 5
    for ds in dodss:
        ds = Dataset.fromPolicy(unserializePolicy(ds))
        self.assertEquals(ds.type, "PostISR")
        self.assertEquals(ds.ids["ampid"], i)
        self.assertEquals(ds.ids["visitid"], 44291)
        self.assertEquals(ds.ids["ccdid"], 3)
        self.assertEquals(ds.ids["raftid"], 33)
        self.assertEquals(ds.ids["snapid"], 0)
        i += 1
def testProcessDataset(self):
    with self.bb.queues:
        self.assertEquals(self.bb.queues.dataAvailable.length(), 0)

    policy = Policy.createPolicy(os.path.join(exampledir,
                                              "ccdassembly-joboffice.paf"))
    spolicy = policy.getPolicy("schedule")

    # manipulate the policy
    idp = Policy.createPolicy(PolicyString(idpolicy))
    spolicy.set("job.identity", idp)

    self.sched = DataTriggeredScheduler(self.bb, spolicy, self.logger)

    # pdb.set_trace()
    ds = Dataset("PostISR", visitid=88, ccdid=22, snapid=0, ampid=15)
    self.sched.processDataset(ds)

    with self.bb.queues:
        self.assertEquals(self.bb.queues.dataAvailable.length(), 1)
        self.assertEquals(self.bb.queues.jobsPossible.length(), 1)
        job = self.bb.queues.jobsPossible.get(0)
        self.assertEquals(job.getName(), "Job-1")
        self.assertEquals(job.triggerHandler.getNeededDatasetCount(), 15)
        self.assertEquals(self.sched.nameNumber, 2)

    ds = Dataset("PostISR", visitid=95, ccdid=22, snapid=0, ampid=15)
    self.sched.processDataset(ds)

    with self.bb.queues:
        self.assertEquals(self.bb.queues.dataAvailable.length(), 2)
        self.assertEquals(self.bb.queues.jobsPossible.length(), 2)
        job = self.bb.queues.jobsPossible.get(1)
        self.assertEquals(job.getName(), "Job-2")
        self.assertEquals(job.triggerHandler.getNeededDatasetCount(), 15)
        inputs = job.getInputDatasets()
        self.assertEquals(len(inputs), 16)
        self.assertEquals(inputs[0].type, "PostISR")
        self.assertEquals(self.sched.nameNumber, 3)

    ds = Dataset("PostISR", visitid=88, ccdid=22, snapid=0, ampid=14)
    self.sched.processDataset(ds)

    with self.bb.queues:
        self.assertEquals(self.bb.queues.dataAvailable.length(), 3)
        self.assertEquals(self.bb.queues.jobsPossible.length(), 2)
        job = self.bb.queues.jobsPossible.get(0)
        self.assertEquals(job.triggerHandler.getNeededDatasetCount(), 14)

    # pdb.set_trace()
    for i in xrange(14):
        ds = Dataset("PostISR", visitid=88, ccdid=22, snapid=0, ampid=i)
        self.sched.processDataset(ds)

    with self.bb.queues:
        self.assertEquals(self.bb.queues.dataAvailable.length(), 17)
        self.assertEquals(self.bb.queues.jobsPossible.length(), 2)
        job = self.bb.queues.jobsPossible.get(0)
        self.assertEquals(job.triggerHandler.getNeededDatasetCount(), 0)
        self.assert_(job.isReady())
def testFromPolicy(self):
    type = "CalExp"
    path = "goob/CalExp-v88-c12.fits"
    ccdid = 12
    visitid = 88

    p = Policy()
    p.set("type", type)
    # pdb.set_trace()
    ds = Dataset.fromPolicy(p)
    self.assertEquals(ds.type, type)
    self.assert_(ds.path is None)
    self.assert_(ds.ids is None)

    p.set("path", path)
    ds = Dataset.fromPolicy(p)
    self.assertEquals(ds.type, type)
    self.assertEquals(ds.path, path)
    self.assert_(ds.ids is None)

    p.set("ids.ccdid", ccdid)
    p.set("ids.visitid", visitid)
    ds = Dataset.fromPolicy(p)
    self.assertEquals(ds.type, type)
    self.assertEquals(ds.path, path)
    self.assert_(ds.ids is not None)
    self.assertEquals(ds.ids["ccdid"], ccdid)
    self.assertEquals(ds.ids["visitid"], visitid)
def toDatasets(dsstrs, delim=r'\s', eqdelim='='):
    out = []
    delim = re.compile(r'[%s]' % delim)
    for dsstr in dsstrs:
        args = delim.split(dsstr)
        type = filter(lambda a: a.find(eqdelim) < 0, args)
        if len(type) > 1:
            fail("Dataset with multiple types specified: %s", dsstr)
        if len(type) == 0:
            type = ["unknown"]

        ds = Dataset(type[0])
        ds.ids = {}
        for arg in args:
            if arg.find(eqdelim) < 0:
                continue
            parts = arg.split(eqdelim, 1)
            try:
                parts[1] = int(parts[1])
            except ValueError:
                pass
            ds.ids[parts[0]] = parts[1]

        out.append(ds)
    return out
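# Hedged usage sketch (added for illustration; not part of the original
# source).  It shows how the toDatasets() helper above parses a
# whitespace-delimited "type name=value ..." string; the demo function name
# and the sample string are assumptions.
def _demoToDatasets():
    dss = toDatasets(["PostISR visit=9999 ccd=22 amp=07"])
    ds = dss[0]
    assert ds.type == "PostISR"        # the token without '=' becomes the type
    assert ds.ids["visit"] == 9999     # numeric-looking values become ints
    assert ds.ids["amp"] == 7          # "07" also parses as an int
    return dss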
def testToString(self):
    type = "CalExp"
    path = "goob/CalExp-v88-c12.fits"
    ccdid = 12
    visitid = 88

    ds = Dataset(type, ids={"ccdid": ccdid, "visitid": visitid})
    self.assertEquals(ds.toString(),
                      "%s-ccdid%s-visitid%s" % (type, ccdid, visitid))
def testDatasetType(self, t=None):
    if not t:
        t = SimpleTrigger(self.type)
    ds = Dataset("goob")
    self.assert_(not t.recognize(ds))
    ds = Dataset(self.type)
    self.assert_(t.recognize(ds))
def setUp(self):
    type = "CalExp"
    path = "goob/CalExp-v88-c12.fits"
    ccdid = 12
    visitid = 88
    ds = Dataset(type, path, ccdid=ccdid, visitid=visitid)
    self.name = ds.toString()
    self.bbi = bb.DataProductItem.createItem(dataset=ds,
                                             props={"foo": "bar", "count": 3,
                                                    "files": ["goob", "gurn"]})
    self.initCount = 12
def testToPolicy(self):
    type = "CalExp"
    path = "goob/CalExp-v88-c12.fits"
    ccdid = 12
    visitid = 88

    orig = Dataset(type, path, ccdid=ccdid, visitid=visitid)
    pol = orig.toPolicy()
    ds = Dataset.fromPolicy(pol)
    self.assertEquals(ds.type, type)
    self.assertEquals(ds.path, path)
    self.assert_(ds.ids is not None)
    self.assertEquals(ds.ids["ccdid"], ccdid)
    self.assertEquals(ds.ids["visitid"], visitid)
def toDatasets(lines, ctrl, intids=None):
    if not isinstance(lines, list):
        lines = [lines]
    if intids is None:
        intids = ctrl["intids"]

    out = []
    if ctrl["format"]:
        for line in lines:
            dataset = ctrl["format"].parse(line)
            out.append(dataset)
    else:
        for line in lines:
            if ctrl["iddelim"]:
                args = line.split(ctrl["iddelim"])
            else:
                args = line.split()

            type = filter(lambda a: a.find(ctrl["eqdelim"]) < 0, args)
            if len(type) > 1:
                raise ValueError("Multiple dataset types given: " +
                                 " ".join(type))
            if len(type) == 0:
                type = ["unknown"]

            ds = Dataset(type[0])
            ds.ids = {}
            for arg in args:
                if arg.find(ctrl["eqdelim"]) < 0:
                    continue
                parts = arg.split(ctrl["eqdelim"], 1)
                ds.ids[parts[0]] = parts[1]

            if intids:
                # convert values of selected ids to integers
                for idname in intids:
                    if ds.ids.has_key(idname):
                        try:
                            ds.ids[idname] = int(ds.ids[idname])
                        except ValueError, ex:
                            raise ValueError("ID %s value is not an int: %s" %
                                             (idname, ds.ids[idname]))
            out.append(ds)

    return out
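# Hedged usage sketch (added for illustration; not part of the original
# source).  The ctrl dictionary below is an assumption inferred from the keys
# this toDatasets() variant reads ("format", "iddelim", "eqdelim", "intids");
# the demo function name and the sample line are hypothetical.
def _demoToDatasetsWithCtrl():
    ctrl = {"format": None,    # no format parser: fall back to delimiter splitting
            "iddelim": None,   # None: split the line on whitespace
            "eqdelim": "=",    # separator between an id name and its value
            "intids": ["visit", "snap"]}   # ids whose values should become ints
    dss = toDatasets("PostISR visit=9999 ccd=22 snap=0", ctrl)
    assert dss[0].type == "PostISR"
    assert dss[0].ids["visit"] == 9999   # listed in intids, so converted
    assert dss[0].ids["ccd"] == "22"     # not listed, so left as a string
    return dss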
def testCreateName(self):
    policy = Policy.createPolicy(os.path.join(exampledir,
                                              "ccdassembly-joboffice.paf"))
    spolicy = policy.getPolicy("schedule")
    sched = DataTriggeredScheduler(self.bb, spolicy, self.logger)

    ds = Dataset("PostISR", ampid=3)
    self.assertEquals(sched.createName(ds), "Job-1")
def setUp(self):
    type = "CalExp"
    path = "goob/CalExp-v88-c12.fits"
    ccdid = 12
    visitid = 88
    ds = Dataset(type, path, ccdid=ccdid, visitid=visitid)
    self.job = bb.JobItem.createItem(ds, "job")
def _getDatasets(self, key):
    dss = self.getProperty(key)
    out = []
    if dss:
        if not isinstance(dss, list):
            dss = [dss]
        for ds in dss:
            out.append(Dataset.fromPolicy(ds))
    return out
def testIds2(self, t=None):
    if not t:
        t = SimpleTrigger(ids=self.idd)

    ds = Dataset(self.type)
    self.assert_(not t.recognize(ds))
    ds = Dataset(self.type, ccd=5, amp=0)
    self.assert_(not t.recognize(ds))
    ds = Dataset(self.type, ccd=5, amp=0, visit=88)
    self.assert_(t.recognize(ds))
    ds = Dataset(self.type, ccd=5, amp=0, visit=88, filt='r')
    self.assert_(t.recognize(ds))
    ds = Dataset(self.type, ccd=5, amp=0, visit=89, filt='r')
    self.assert_(not t.recognize(ds))
def getDataset(self):
    """
    return a Dataset instance describing this product or None if a
    description is not available.
    """
    ds = self.getProperty(self.DATASET)
    if ds:
        ds = Dataset.fromPolicy(ds)
    return ds
def testCreateName2(self):
    policy = Policy.createPolicy(os.path.join(exampledir,
                                              "ccdassembly-joboffice.paf"))
    spolicy = policy.getPolicy("schedule")
    spolicy.set("job.name.template", "%(type)s-v%(ampid)s")
    sched = DataTriggeredScheduler(self.bb, spolicy, self.logger)

    ds = Dataset("PostISR", ampid=3)
    self.assertEquals(sched.createName(ds), "PostISR-v3")
def datasetFromProperty(self, policystr):
    """
    convert the given string-encoded policy data into a Dataset.
    @param policystr   the policy data written into a string in PAF format.
    """
    try:
        pol = unserializePolicy(policystr)
        return Dataset.fromPolicy(pol)
    except lsst.pex.exceptions.LsstCppException, ex:
        raise RuntimeError("Dataset encoding error: " + policystr)
def testListJobs2(self):
    policy = self.schedpolicy.get("schedule.job.input")
    trigger = Trigger.fromPolicy(policy)

    ds = Dataset("goob", ids={"skyTile": 95127})
    inputs = trigger.listDatasets(ds)
    self.assertEquals(len(inputs), 8)
    self.assertEquals(inputs[0].type, 'src')
    self.assert_(inputs[0].ids.has_key('visit'))
    self.assert_(inputs[0].ids.has_key('raft'))
    self.assert_(inputs[0].ids.has_key('sensor'))
    self.assertEquals(inputs[0].ids['visit'], 85408535)
def testCtor(self):
    type = "CalExp"
    path = "goob/CalExp-v88-c12.fits"
    ccdid = 12
    visitid = 88

    ds = Dataset(type)
    self.assertEquals(ds.type, type)
    self.assert_(ds.path is None)
    self.assert_(ds.ids is None)

    ds = Dataset(type, path)
    self.assertEquals(ds.type, type)
    self.assertEquals(ds.path, path)
    self.assert_(ds.ids is None)

    ds = Dataset(type, ccdid=ccdid, visitid=visitid)
    self.assertEquals(ds.type, type)
    self.assert_(ds.path is None)
    self.assert_(ds.ids is not None)
    self.assertEquals(ds.ids["ccdid"], ccdid)
    self.assertEquals(ds.ids["visitid"], visitid)

    # pdb.set_trace()
    ds = Dataset(type, path, False, {"ccdid": ccdid, "visitid": visitid})
    self.assertEquals(ds.type, type)
    self.assertEquals(ds.path, path)
    self.assert_(not ds.valid)
    self.assert_(ds.ids is not None)
    self.assertEquals(ds.ids["ccdid"], ccdid)
    self.assertEquals(ds.ids["visitid"], visitid)

    ds = Dataset(type, ids={"ccdid": ccdid, "visitid": visitid})
    self.assertEquals(ds.type, type)
    self.assert_(ds.path is None)
    self.assert_(ds.ids is not None)
    self.assertEquals(ds.ids["ccdid"], ccdid)
    self.assertEquals(ds.ids["visitid"], visitid)
def parse(self, line):
    m = self.search(line)
    data = m.groupdict()

    tp = "unknown"
    if data.has_key("type"):
        tp = data["type"]
        del data["type"]
    out = Dataset(tp)
    out.ids = data

    for key in out.ids.keys():
        if self.ids[key] == 'i':
            try:
                out.ids[key] = int(out.ids[key])
            except ValueError, e:
                warn("Value is not an integer: %s", out.ids[key])
        elif self.ids[key] == 'f':
            try:
                out.ids[key] = float(out.ids[key])
            except ValueError, e:
                warn("Value is not a floating-point number: %s", out.ids[key])

    return out
def setUp(self):
    self.topic = "test"
    self.broker = "lsst8.ncsa.uiuc.edu"
    self.runid = "test1"
    self.rcvr = EventReceiver(self.broker, self.topic,
                              "RUNID='%s'" % self.runid)

    self.ds = Dataset("PostISR", ids={"visit": "9999", "ccd": "22",
                                      "amp": "07", "snap": "0"})
    names = self.ds.ids.keys()
    names.sort()
    self.dsstr = self.ds.type
    for name in names:
        self.dsstr += " %s=%s" % (name, self.ds.ids[name])
def testListDatasets(self):
    t = SimpleTrigger(self.type, self.idd)
    self.assert_(t.hasPredictableDatasetList())

    ds = Dataset(self.type, ccd=5, amp=0, visit=88, filt='r')
    dss = t.listDatasets(ds)
    self.assertEquals(len(dss), 8 * 16 * 1)
    self.assertEquals(dss[0].ids["visit"], 88)
    self.assert_(dss[0].ids.has_key("filt"))
    self.assertEquals(dss[0].ids["filt"], 'r')
    for ds in dss:
        self.assert_(t.recognize(ds), "failed to recognize " + str(ds))

    idd = dict(self.idd)
    idd["visit"] = IntegerIDFilter("visit", min=87)
    t = SimpleTrigger(self.type, idd)
    self.assert_(not t.hasPredictableDatasetList())
    dss = t.listDatasets(ds)
    self.assertEquals(len(dss), 8 * 16 * 1)
    self.assertEquals(dss[0].ids["visit"], 88)
    self.assert_(dss[0].ids.has_key("filt"))
    self.assertEquals(dss[0].ids["filt"], 'r')
def testFormat(self):
    cmd = announceDataset
    cmd += seargs % {"runid": self.runid, "topic": self.topic,
                     "broker": self.broker}
    cmd += " -F '%s'" % "%(type)s-v%(visit)i-c%(ccd)s-a%(amp)s-s%(snap)i.fits"
    cmd += " -D '%s'" % "PostISR-v9999-c22-a07-s0.fits"
    ds = Dataset("PostISR", ids={"visit": 9999, "ccd": "22",
                                 "amp": "07", "snap": 0})

    os.system(cmd)
    event = self.rcvr.receiveEvent(500)
    self.assert_(event is not None)
    dss = self.extractDatasets(event)
    self.assertEquals(len(dss), 1)
    self.assertEquals(dss[0], ds, "%s != %s" % (dss[0], ds))
def testEquals(self):
    type = "CalExp"
    path = "goob/CalExp-v88-c12.fits"
    ccdid = 12
    visitid = 88

    ds1 = Dataset(type, path, ccdid=ccdid, visitid=visitid)
    ds2 = Dataset(type, path, ccdid=ccdid, visitid=visitid)
    self.assert_(ds1 == ds2)
    self.assertEquals(ds1, ds2)
    self.assertEquals(ds2, ds1)
    self.assert_(ds1 in [ds2])

    ds2.ids["ccdid"] += 1
    self.assertNotEquals(ds1, ds2)
    self.assertNotEquals(ds2, ds1)
    self.assert_(ds1 not in [ds2])

    ds2 = Dataset(type, path, ccdid=ccdid, visitid=visitid, ampid=5)
    self.assertNotEquals(ds1, ds2)
    self.assertNotEquals(ds2, ds1)

    ds2 = Dataset("junk", path, ccdid=ccdid, visitid=visitid)
    self.assertNotEquals(ds1, ds2)
    self.assertNotEquals(ds2, ds1)

    ds2 = Dataset(type)
    self.assertNotEquals(ds1, ds2)
    self.assertNotEquals(ds2, ds1)

    ds2 = Dataset(None, ccdid=ccdid, visitid=visitid)
    self.assertNotEquals(ds1, ds2)
    self.assertNotEquals(ds2, ds1)

    ds1 = Dataset(None, ccdid=ccdid, visitid=visitid)
    self.assertEquals(ds1, ds2)
    self.assertEquals(ds2, ds1)
def _determineJobIdentity(self, outputs, inputs=None):
    # return an identifier for the job implied by the outputs and inputs.
    # this identifier is returned in the form of a Dataset type (even
    # though, semantically, it represents a job).
    if inputs is None:
        inputs = []

    if self.jobIdConf:
        # determine our template dataset for our identity
        template = None
        if self.jobIdConf.exists("templateType"):
            # find the first dataset (in outputs, then inputs) matching
            # this dataset type.
            type = self.jobIdConf.getString("templateType")
            selecttype = lambda d: d.type == type
            template = filter(selecttype, outputs)
            if len(template) == 0:
                template = filter(selecttype, inputs)
            if len(template) > 0:
                template = template[0]
        if not template:
            # default to the first output (then input) dataset
            template = len(outputs) > 0 and outputs[0] or inputs[0]

        out = Dataset(template.type)
        if self.jobIdConf.exists("type"):
            out.type = self.jobIdConf.getString("type")
        if self.jobIdConf.exists("id"):
            out.ids = {}
            for id in self.jobIdConf.getStringArray("id"):
                out.ids[id] = template.ids[id]

        # the identity dataset is complete
        return out

    elif len(outputs) > 0:
        return outputs[0]
    elif len(inputs) > 0:
        return inputs[0]
    else:
        return Dataset("unknown")
def _jobItem(self, name, type=""):
    ds = Dataset(type)
    return bb.JobItem.createItem(ds, name)
def _datasetItem(self, name, type=""):
    ds = Dataset(type, name)
    return bb.DataProductItem.createItem(ds)
def getJobIdentity(self):
    """
    return a Dataset instance representing the unique identity of the Job.
    """
    return Dataset.fromPolicy(self.getProperty(self.JOBIDENTITY))
def testProcessDataset(self):
    # self.logger.setThreshold(Log.DEBUG)
    with self.bb.queues:
        self.assertEquals(self.bb.queues.dataAvailable.length(), 0)

    policy = Policy.createPolicy(os.path.join(exampledir,
                                              "srcAssoc-joboffice.paf"))
    spolicy = policy.getPolicy("schedule")
    self.sched = ButlerTriggeredScheduler(self.bb, spolicy, self.logger)

    # pdb.set_trace()
    ds = Dataset("src", visit=85408535, raft="2,2", sensor="2,2")
    self.sched.processDataset(ds)

    with self.bb.queues:
        self.assertEquals(self.bb.queues.dataAvailable.length(), 1)
        self.assertEquals(self.bb.queues.jobsPossible.length(), 1)
        job = self.bb.queues.jobsPossible.get(0)
        self.assertEquals(job.getName(), "Job-1")
        self.assertEquals(job.triggerHandler.getNeededDatasetCount(), 14)
        self.assertEquals(len(job.getInputDatasets()), 15)
        ods = job.getOutputDatasets()
        self.assertEquals(len(ods), 7)
        self.assertEquals(ods[0].type, "source")
        self.assert_(ods[0].ids.has_key("skyTile"))
        self.assertEquals(self.sched.nameNumber, 2)

    # pdb.set_trace()
    dss = [Dataset("src", visit=85408535, raft="2,2", sensor="0,2"),
           Dataset("src", visit=85408535, raft="2,2", sensor="1,1"),
           Dataset("src", visit=85408535, raft="2,2", sensor="1,2"),
           Dataset("src", visit=85408535, raft="2,2", sensor="2,0"),
           Dataset("src", visit=85408535, raft="2,2", sensor="2,1"),
           Dataset("src", visit=85408535, raft="2,3", sensor="1,0"),
           Dataset("src", visit=85408535, raft="2,3", sensor="2,0"),
           Dataset("src", visit=85408535, raft="2,3", sensor="2,1"),
           Dataset("src", visit=85408535, raft="3,2", sensor="0,0"),
           Dataset("src", visit=85408535, raft="3,2", sensor="0,1"),
           Dataset("src", visit=85408535, raft="3,2", sensor="0,2"),
           Dataset("src", visit=85408535, raft="3,2", sensor="1,2"),
           Dataset("src", visit=85408535, raft="3,3", sensor="1,0")]

    i = 1
    for ds in dss:
        # pdb.set_trace()
        self.sched.processDataset(ds)
        i += 1
        with self.bb.queues:
            self.assertEquals(self.bb.queues.dataAvailable.length(), i)
            job = self.bb.queues.jobsPossible.get(0)
            self.assertEquals(job.triggerHandler.getNeededDatasetCount(),
                              15 - i)

    with self.bb.queues:
        # 9 is empirical; is it really right, though?
        self.assertEquals(self.bb.queues.jobsPossible.length(), 9)

    # pdb.set_trace()
    ds = Dataset("src", visit=85408535, raft="3,3", sensor="0,0")
    self.sched.processDataset(ds)

    with self.bb.queues:
        self.assertEquals(self.bb.queues.dataAvailable.length(), 15)
        job = self.bb.queues.jobsPossible.get(0)
        self.assertEquals(job.triggerHandler.getNeededDatasetCount(), 0)
        self.assert_(job.isReady())
        job = self.bb.queues.jobsPossible.get(1)
        self.assertEquals(job.getName(), "Job-2")
        self.assert_(not job.isReady())
def _makeDataset(self):
    return Dataset.fromPolicy(utils.unserializePolicy(postisrdata))
def testFile(self):
    ds = Dataset("PostISR", visit="888", ccd="10", amp="07", snap="0")

    cmd = "announceDataset.py"
    cmd += seargs % {"runid": self.runid, "topic": self.topic,
                     "broker": self.broker}
    cmd += " %s" % dsfile
    cmd = cmd.split()
    os.spawnv(os.P_NOWAIT, announceDataset, cmd)

    event = self.rcvr.receiveEvent(0)
    self.assert_(event is None)

    count = 0
    try:
        event = self.rcvr.receiveEvent(5000)
        count += 1
        self.assert_(event is not None)
        dss = self.extractDatasets(event)
        self.assertEquals(len(dss), 1)
        self.assertEquals(dss[0], ds)

        ds.ids["amp"] = "08"
        event = self.rcvr.receiveEvent(500)
        count += 1
        self.assert_(event is not None)
        dss = self.extractDatasets(event)
        self.assertEquals(len(dss), 1)
        self.assertEquals(dss[0], ds)

        ds.ids["amp"] = "09"
        ds.ids["visit"] = 888
        ds.ids["snap"] = 0
        event = self.rcvr.receiveEvent(500)
        count += 1
        self.assert_(event is not None)
        dss = self.extractDatasets(event)
        self.assertEquals(len(dss), 1)
        self.assertEquals(dss[0], ds)

        ds.ids["visit"] = "888"
        ds.ids["snap"] = "0"
        event = self.rcvr.receiveEvent(500)
        count += 1
        self.assert_(event is not None)
        dss = self.extractDatasets(event)
        self.assertEquals(len(dss), 1)
        self.assertEquals(dss[0], ds)
        self.assert_(dss[0].valid, "event #%i is not valid" % count)

        event = self.rcvr.receiveEvent(500)
        count += 1
        self.assert_(event is not None)
        dss = self.extractDatasets(event)
        self.assertEquals(len(dss), 1)
        self.assertEquals(dss[0], ds)
        self.assert_(not dss[0].valid, "failed event #%i is valid" % count)

        event = self.rcvr.receiveEvent(500)
        count += 1
        self.assert_(event is not None)
        dss = self.extractDatasets(event)
        self.assertEquals(len(dss), 1)
        self.assertEquals(dss[0], ds)
        self.assert_(not dss[0].valid, "failed event #%i is valid" % count)

        event = self.rcvr.receiveEvent(500)
        count += 1
        self.assert_(event is not None)
        dss = self.extractDatasets(event)
        self.assertEquals(len(dss), 1)
        self.assertEquals(dss[0], ds)
        self.assert_(dss[0].valid, "failed event #%i is valid" % count)

        event = self.rcvr.receiveEvent(500)
        count += 1
        self.assert_(event is not None)
        dss = self.extractDatasets(event)
        self.assertEquals(len(dss), 1)
        self.assertEquals(dss[0], ds)
        self.assert_(not dss[0].valid, "failed event #%i is valid" % count)

        event = self.rcvr.receiveEvent(500)
        count += 1
        self.assert_(event is not None)
        dss = self.extractDatasets(event)
        self.assertEquals(len(dss), 1)
        self.assertEquals(dss[0], ds)
        self.assert_(dss[0].valid, "failed event #%i is valid" % count)

        event = self.rcvr.receiveEvent(500)
        count += 1
        self.assert_(event is not None)
        dss = self.extractDatasets(event)
        self.assertEquals(len(dss), 1)
        self.assertEquals(dss[0], ds)
        self.assert_(not dss[0].valid, "failed event #%i is valid" % count)

        event = self.rcvr.receiveEvent(500)
        count += 1
        self.assert_(event is not None)
        dss = self.extractDatasets(event)
        self.assertEquals(len(dss), 1)
        self.assertEquals(dss[0], ds)
        self.assert_(dss[0].valid, "failed event #%i is valid" % count)

        event = self.rcvr.receiveEvent(500)
        count += 1
        self.assert_(event is not None)
        dss = self.extractDatasets(event)
        self.assertEquals(len(dss), 1)
        self.assertEquals(dss[0], ds,
                          "event #%i failed to use iddelim" % count)
        self.assert_(dss[0].valid, "event #%i is not valid" % count)

        event = self.rcvr.receiveEvent(500)
        count += 1
        self.assert_(event is not None)
        dss = self.extractDatasets(event)
        self.assertEquals(len(dss), 1)
        self.assertEquals(dss[0], ds,
                          "event #%i failed to use eqdelim" % count)

        ds.ids["visit"] = 888
        ds.ids["snap"] = 0
        event = self.rcvr.receiveEvent(500)
        count += 1
        self.assert_(event is not None)
        dss = self.extractDatasets(event)
        self.assertEquals(len(dss), 1)
        self.assertEquals(dss[0], ds,
                          "event #%i failed to use format: %s != %s" %
                          (count, dss[0], ds))

        ds.ids["snap"] = 1
        event = self.rcvr.receiveEvent(500)
        count += 1
        self.assert_(event is not None)
        dss = self.extractDatasets(event)
        self.assertEquals(len(dss), 1)
        self.assertEquals(dss[0], ds,
                          "event #%i failed to use format: %s != %s" %
                          (count, dss[0], ds))

        ds.ids["amp"] = "08"
        ds.ids["snap"] = 0
        event = self.rcvr.receiveEvent(500)
        count += 1
        self.assert_(event is not None)
        dss = self.extractDatasets(event)
        self.assertEquals(len(dss), 1)
        self.assertEquals(dss[0], ds,
                          "event #%i failed to use format: %s != %s" %
                          (count, dss[0], ds))

        ds.ids["amp"] = "08"
        ds.ids["snap"] = 1
        event = self.rcvr.receiveEvent(500)
        count += 1
        self.assert_(event is not None)
        dss = self.extractDatasets(event)
        self.assertEquals(len(dss), 1)
        self.assertEquals(dss[0], ds,
                          "event #%i failed to use format: %s != %s" %
                          (count, dss[0], ds))

        self.assertEquals(count, 17, "lost count of events")
    finally:
        # drain any remaining events so they do not leak into later tests
        for i in xrange(17 - count):
            event = self.rcvr.receiveEvent(50)
def unserializeDataset(datasetstr):
    """
    turn a PAF-serialized string back into a Dataset.  This is the opposite
    of serializeDataset().
    """
    return Dataset.fromPolicy(unserializePolicy(datasetstr))
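# Hedged round-trip sketch (added for illustration; not part of the original
# source).  It assumes serializeDataset() is the counterpart named in the
# docstring above and takes a Dataset, and that Dataset equality compares
# type and ids as the testEquals case in this module exercises.
def _demoDatasetRoundTrip():
    ds = Dataset("PostISR", ids={"visit": 9999, "ccd": "22", "amp": "07"})
    encoded = serializeDataset(ds)       # Dataset -> PAF-format string
    decoded = unserializeDataset(encoded)  # PAF-format string -> Dataset
    assert decoded == ds                 # round trip preserves type and ids
    return decoded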