Example #1
    def test_multiflow1(self):
        """ Test multiflow with two Project inputs """
        # Parent projects.
        p1 = Project(self.outputDir + "/p1")
        p2 = Project(self.outputDir + "/p2")
        for p in [p1, p2]:
            # add task 1
            task = admit.File_AT(touch=True)
            task.setkey("file", "File.dat")
            tid1 = p.addtask(task)
            # add task 2
            task = admit.Flow11_AT(alias="at" + p.baseDir[-2])  # at1 or at2
            task.setkey("file", "Flow11.dat")
            tid2 = p.addtask(task, [(tid1, 0)])
            # run flow
            p.run()

        # Multiflow project.
        mflow = Project(self.outputDir + "/mflow")

        # Add parent projects to the multiflow.
        # Note they must be completely up-to-date for this to succeed.
        pid1 = mflow.pm.addProject(self.outputDir + "/p1")
        pid2 = mflow.pm.addProject(self.outputDir + "/p2")

        # Find some ATs to link into the multiflow.
        # Here searching is done by alias name.
        stuples = []
        for pid in [pid1, pid2]:
            alias = "at" + mflow.pm[pid].baseDir[-2]
            ats = mflow.pm.findTaskAlias(pid, alias)
            self.assertEqual(len(ats), 1, "Found wrong number of matches")
            self.assertEqual(ats[0]._alias, alias, "Alias mismatch")
            self.assertNotEqual(ats[0].getProject(), 0, "Null project ID")

            # Add task to the multiflow (must be a root task---no stuples).
            tid = mflow.addtask(ats[0])
            self.assertNotEqual(tid, -1, "mflow.addtask(" + alias + ") failed")
            stuples.append((tid, 0))

        # Combine output from the two newly linked tasks.
        tid = mflow.addtask(admit.FlowN1_AT(file="FlowN1.dat", touch=True),
                            stuples)
        self.assertNotEqual(tid, -1, "mflow.addtask(FlowN1) failed")

        mflow.show()
        # Run the multiflow.
        mflow.run()

        # Make at2 out of date, then re-run the multiflow to update everything.
        at2 = mflow.findtask(lambda at: at._alias == "at2")
        self.assertEqual(len(at2), 1, "Found wrong number of matches for at2")
        self.assertEqual(at2[0]._alias, "at2", "Alias mismatch for at2")
        at2[0].setkey("file", "Flow11-at2.dat")
        mflow.show()
        mflow.run()
Example #2
    def test_multiflow2(self):
        """ Test cloning a template multiflow onto parent projects """
        admit.Project = Project()

        # Template multiflow is Flow11+FlowN1+Flow11 with two File inputs.
        mflow = Project(self.outputDir + "/mflow")
        tid1 = mflow.addtask(admit.File_AT())
        tid2 = mflow.addtask(admit.File_AT())
        tid3 = mflow.addtask(admit.Flow11_AT(), [(tid2, 0)])
        tid4 = mflow.addtask(admit.FlowN1_AT(touch=True), [(tid2, 0),
                                                           (tid3, 0)])
        tid5 = mflow.addtask(admit.Flow11_AT(), [(tid4, 0)])
        mflow[tid3].setkey("file", "Flow11a.dat")
        mflow[tid3].setAlias(({'alias1': 0}, 0), "alias11")
        mflow[tid4].setkey("file", "FlowN1a.dat")
        mflow[tid4].setAlias(({'alias2': 0}, 0), "alias22")
        mflow[tid5].setkey("file", "Flow11b.dat")
        mflow[tid5].setAlias(({'alias3': 0}, 0), "alias33")
        mflow[tid5].enabled(False)
        pm = mflow.pm

        for d in [self.outputDir + "/p1", self.outputDir + "/p2"]:
            p = Project(d)
            for f in ["File1.dat", "File2.dat"]:
                p.addtask(admit.File_AT(touch=True, file=f, alias=f[:-4]))
            p.run()
            p.write()
            pid0 = pm.addProject(d)
            assert pid0 != -1

            # Clone template flow onto current project.
            pid = pm.getProjectId(d)
            assert pid == pid0
            at1 = pm.findTaskAlias(pid, "File1")
            at2 = pm.findTaskAlias(pid, "File2")
            assert len(at1) == 1
            assert len(at2) == 1
            id1 = at1[0].id(True)
            id2 = at2[0].id(True)
            mflow.fm.show()
            pm[pid].show()
            pm[pid].fm.clone(id1, (mflow.fm, tid2), {tid1: id2})
            pm[pid].show()
            pm[pid].run()
            pm[pid].write()

            # Dot output test.
            pm[pid][id1].markChanged()
            pm[pid].fm.diagram("%s/flow.dot" % d)
            try:
                os.system("dot -Tpng %s/flow.dot -o %s/flow.png" % (d, d))
            except OSError:
                # Graphviz may be unavailable; the PNG conversion is optional.
                pass

        del admit.Project
Example #3
    def setUp(self):
        self.assertTrue(admit_import)

        # Sometimes the CWD is set to self.outputDir, which is deleted by the
        # tearDown() method; in that case change back to the parent directory.
        try:
            os.getcwd()
        except OSError:
            os.chdir('..')

        self.outputDir = "/tmp/AdmitCodeTest_%d.admit" % os.getpid()
        admit.Project = Project(self.outputDir)
Example #4
    def addProject(self, baseDir):
        """
        Adds a project.

        The project must be completely up to date to be accepted.

        Parameters
        ----------
        baseDir : str
            ADMIT project base directory.

        Returns
        -------
        int
            Project ID number, else -1 if the project was rejected
            (not up-to-date).

        Notes
        -----
        The up-to-date requirement is a safety feature. Managed projects are
        assumed to be quasi-static, since tasks linked from them must provide
        valid BDP output at the root of the associated multiflow.
        """
        # Ignore attempts to re-add same project.
        # This will commonly occur when re-running a multiflow script.
        for pid in self._baseDirs:
            if baseDir == self._baseDirs[pid]: return pid

        #project = admit.Project(baseDir)
        project = Project(baseDir)
        stale = project.fm.find(lambda at: at.isstale())
        if not stale:
            pid = 1 + len(self._projects)
            project.project_id = pid
            self._projects[pid] = project
            self._baseDirs[pid] = baseDir

            # Embed project ID in tasks to indicate project ownership.
            for tid in project.fm:
                project.fm[tid].setProject(pid)
        else:
            print "PM.addProject(): Project", baseDir, \
                  "out of date; not added."
            pid = -1

        return pid
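
For reference, here is a minimal usage sketch of addProject() outside the test harness. It is not taken from the ADMIT sources; the paths and the "at1" alias are hypothetical placeholders, and the calls mirror the patterns in Examples #1 and #2 above (run and write the parent project, add it to the multiflow's project manager, then look up tasks by alias).

# Hedged sketch: link an existing, up-to-date project into a multiflow.
# The paths and alias below are hypothetical placeholders.
mflow = Project("/tmp/multiflow.admit")
pid = mflow.pm.addProject("/tmp/parent.admit")
if pid == -1:
    # The parent has stale tasks; bring it up to date and retry,
    # as the parent projects are run (and written) in the tests above.
    parent = Project("/tmp/parent.admit")
    parent.run()
    parent.write()
    pid = mflow.pm.addProject("/tmp/parent.admit")
# Tasks owned by the managed project can now be linked as multiflow roots.
ats = mflow.pm.findTaskAlias(pid, "at1")
if len(ats) == 1:
    tid = mflow.addtask(ats[0])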
Example #5
    def test_Project_find_bdp(self):
        """ test find_bdp() """
        project = Project()

        # add one task
        task = admit.File_AT(touch=True)
        name = "File.dat"
        task.setkey("file", name)
        project.addtask(task)  # add task
        # now add an output bdp
        obdp = admit.File_BDP('Test')
        task.addoutput(obdp)
        project.addtask(task)

        # find_bdp() will search Admit output data directory for *.bdp files
        # should return an empty list since no *.bdp file created by this test
        ret = project.find_bdp()
        self.assertTrue(len(ret) == 0)
Example #6
    def setUp(self):
        self.verbose = False
        self.testName = "Flow Manager Unit Test"
        self.task = list()
        self.fm = admit.Flow()
        admit.Project = Project('/tmp/FM_%d.admit' % os.getpid())
Example #7
    def tearDown(self):
        if os.path.exists(self.outputDir):
            # Create a blank project and then delete it; otherwise removing
            # the directory can fail with "Device or resource busy".
            self.project = Project(self.outputDir)
            shutil.rmtree(self.outputDir)
Example #8
    def test_clone(self):
        admit.Project = Project()
        p1 = Project(self.outputDir + "/clone-p1")
        task = admit.File_AT(touch=True)
        task.setkey("file", "File.dat")
        tid1 = p1.addtask(task)
        #
        task = admit.Flow11_AT(alias="at1")
        task.setkey("file", "Flow11-at1.dat")
        tid2 = p1.addtask(task, [(tid1, 0)])

        p2 = Project(self.outputDir + "/clone-p2")
        task = admit.File_AT(touch=True)
        task.setkey("file", "File.dat")
        tid3 = p2.addtask(task)
        #
        # The following 2-to-1 flow simply inputs the same File BDP twice.
        # This is to exercise cloning a sub-root task with multiple inputs
        # (which is ok since the *sub-flow* stems from only one AT, FlowN1).
        task = admit.FlowN1_AT(alias="at2")
        task.setkey("file", "FlowN1-at2.dat")
        tid4 = p2.addtask(task, [(tid3, 0), (tid3, 0)])
        #
        task = admit.Flow11_AT(alias="at3a")
        task.setkey("file", "Flow11-at3a.dat")
        tid5 = p2.addtask(task, [(tid4, 0)])

        p1.fm.clone(tid2, (p2.fm, tid4))

        # Task to be re-cloned; avoid filename clashes.
        p2[tid5].setkey("file", "Flow11-at3b.dat")
        p2[tid5].setAlias(({'at3b': 0}, 0), "at3bb")

        # We should *not* be able to clone a sub-flow containing ATs
        # receiving inputs beyond the sub-root without an explicit dependency
        # map. Here we append a FlowN1 to p2 to make it depend on the root
        # File AT, outside the sub-flow.
        task = admit.FlowN1_AT(alias="at4")
        task.setkey("file", "FlowN1-at4.dat")
        tid6 = p2.addtask(task, [(tid5, 0), (tid3, 0)])
        try:
            p1.fm.clone(tid2, (p2.fm, tid4))
        except:
            # Clean up aborted clone() (since we're ignoring the failure here).
            # This is highly volatile code users should never imitate!
            p1.fm.remove(p1.fm._tasklevs.keys()[-1])
        else:
            raise Exception, "Non-autonomous clone() unexpectedly succeeded"

        # Non-autonomous sub-flows are ok if all dependencies are explicit.
        try:
            p1.fm.clone(tid2, (p2.fm, tid4), {tid3: tid1})
        except:
            raise Exception, "Non-autonomous clone() unexpectedly failed"

        assert len(p1) == 5

        # This should produce output in clone-p1/ only, not clone-p2/.
        p1.fm.show()
        p1.run()
        p1.write()

        del admit.Project