Exemplo n.º 1
0
    def test_gpu_pipeline(self):
        """
        The gpu pipeline from the examples directory.

        Loads examples/gpu.yaml, overrides the backend and localization
        sections with test-local values, runs the pipeline into a temp
        directory, and checks exit codes and output files.
        """
        with tempfile.TemporaryDirectory() as tempdir:
            output_dir = os.path.join(tempdir, 'outputs')
            # examples/ lives two directory levels above this test file
            example_path = os.path.join(
                os.path.dirname(os.path.dirname(os.path.dirname(__file__))),
                'examples', 'gpu.yaml'
            )
            with open(example_path) as r:
                # safe_load: yaml.load() without an explicit Loader is
                # deprecated since PyYAML 5.1 and can construct arbitrary
                # Python objects; the example config is plain YAML.
                pipeline = yaml.safe_load(r)

            o = Orchestrator({
                **pipeline,
                # override backend/localization so the test runs locally
                'backend': {
                    'type': 'Dummy',
                    'n_workers': 1,
                    'staging_dir': STAGING_DIR
                },
                'localization': {
                    'staging_dir': '/mnt/nfs/canine'
                }
            })

            df = o.run_pipeline(output_dir)

            # every job must report exit code string '0:0'
            self.assertTrue((df[('job', 'exit_code')] == '0:0').all())
            # stdout/stderr output files must exist for every job row
            for _, row in df.iterrows():
                self.assertTrue(os.path.isfile(row[('outputs', 'stdout')]))
                self.assertTrue(os.path.isfile(row[('outputs', 'stderr')]))

            # one result row per job in the spec
            self.assertEqual(len(df), len(o.job_spec))
Exemplo n.º 2
0
 def setUpClass(cls):
     """
     Builds the shared test fixtures: an Orchestrator configured with
     five dummy jobs (each echoes its index and touches three files),
     plus an entered backend and localizer.
     """
     config = {
         'name': 'canine-unittest',
         'backend': {
             'type': 'Dummy',
             'n_workers': 1,
             'staging_dir': STAGING_DIR
         },
         'inputs': {
             'jobIndex': [0, 1, 2, 3, 4],
             'common_file': __file__
         },
         'script': [
             'echo $(hostname) $jobIndex $common_file',
             'touch f1.txt f2.txt f3.txt'
         ],
         'localization': {
             'strategy': 'Batched',
             'staging_dir': '/mnt/nfs/canine'
         },
         'outputs': {
             'output-glob': ('*.txt', lambda x: len(x))
         }
     }
     cls.orchestrator = Orchestrator(config)
     # Enter the backend and localizer contexts manually so they stay
     # open for the whole test class; presumably exited in
     # tearDownClass — confirm.
     cls.backend = cls.orchestrator.backend.__enter__()
     localizer_args = cls.orchestrator.localizer_args
     localizer = cls.orchestrator._localizer_type(
         cls.orchestrator.backend, **localizer_args
     )
     cls.localizer = localizer.__enter__()
Exemplo n.º 3
0
    def test_big_pipeline(self):
        """
        Tests a large-scale pipeline.

        Runs a 25x25 product of inputs on a Dummy backend with one
        worker per CPU, then checks exit codes, output files, and the
        result row count.
        """
        with tempfile.TemporaryDirectory() as tempdir:
            output_dir = os.path.join(tempdir, 'outputs')
            config = {
                'adapter': {
                    'product': True
                },
                'inputs': {
                    'i': list(range(25)),
                    'j': list(range(25)),
                },
                'script': [
                    'python3 -c "[print(i,j) for i in range($i) for j in range($j)]"'
                ],
                'backend': {
                    'type': 'Dummy',
                    'n_workers': cpu_count(),
                    'cpus': 1,
                    'staging_dir': STAGING_DIR
                },
                'resources': {
                    'cpus-per-task': 1,
                    'mem-per-cpu': '256M'
                },
                'localization': {
                    'staging_dir': '/mnt/nfs/canine'
                }
            }
            orchestrator = Orchestrator(config)

            results = orchestrator.run_pipeline(output_dir)

            # every job must report exit code string '0:0'
            self.assertTrue((results[('job', 'exit_code')] == '0:0').all())
            # stdout/stderr output files must exist for every job row
            for _, row in results.iterrows():
                self.assertTrue(os.path.isfile(row[('outputs', 'stdout')]))
                self.assertTrue(os.path.isfile(row[('outputs', 'stderr')]))

            # one result row per job in the spec
            self.assertEqual(len(results), len(orchestrator.job_spec))