Example #1
    def testTaxiPipelineCheckDagConstruction(self):
        airflow_config = {
            'schedule_interval': None,
            'start_date': datetime.datetime(2019, 1, 1),
        }

        # Create directory structure and write expected user module file.
        os.makedirs(os.path.join(self._test_dir, 'taxi'))
        module_file = os.path.join(self._test_dir, 'taxi/taxi_utils.py')
        with open(module_file, 'w') as f:
            f.write('# Placeholder user module file.')

        # Patch $HOME directory for pipeline DAG construction.
        original_home = os.environ['HOME']
        os.environ['HOME'] = self._test_dir
        from tfx.examples.chicago_taxi_pipeline import taxi_pipeline_simple  # pylint: disable=g-import-not-at-top
        os.environ['HOME'] = original_home

        # Build the logical pipeline and check that all 9 components are present.
        logical_pipeline = taxi_pipeline_simple._create_pipeline(
            pipeline_name='Test',
            pipeline_root=self._test_dir,
            data_root=self._test_dir,
            module_file=module_file,
            serving_model_dir=self._test_dir,
            metadata_path=self._test_dir,
            beam_pipeline_args=[])
        self.assertEqual(9, len(logical_pipeline.components))
        # Compile the logical pipeline into an Airflow DAG.
        pipeline = AirflowDagRunner(
            AirflowPipelineConfig(airflow_config)).run(logical_pipeline)
        self.assertIsInstance(pipeline, models.DAG)
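
The direct mutation and restore of os.environ['HOME'] around the import in this example can also be written with unittest.mock.patch.dict, which restores the original value even if the import raises. A minimal sketch; the helper name _import_pipeline_module is hypothetical, while the module path is taken from the example above:

import os
from unittest import mock


def _import_pipeline_module(fake_home):
  # Hypothetical helper: patch $HOME only while the DAG module is imported;
  # patch.dict restores the original environment even if the import fails.
  with mock.patch.dict(os.environ, {'HOME': fake_home}):
    from tfx.examples.chicago_taxi_pipeline import taxi_pipeline_simple  # pylint: disable=g-import-not-at-top
    return taxi_pipeline_simple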
Example #2
 def test_taxi_pipeline_check_dag_construction(self):
   airflow_config = {
       'schedule_interval': None,
       'start_date': datetime.datetime(2019, 1, 1),
   }
   # Build the logical pipeline and check that all 9 components are present.
   logical_pipeline = taxi_pipeline_simple._create_pipeline()
   self.assertEqual(9, len(logical_pipeline.components))
   # Compile the logical pipeline into an Airflow pipeline object.
   pipeline = TfxRunner(airflow_config).run(logical_pipeline)
   self.assertIsInstance(pipeline, AirflowPipeline)
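
Example #2 appears to target an older TFX release: the generic TfxRunner compiles the logical pipeline into an AirflowPipeline object, whereas the newer API used in Examples #1 and #3 compiles through AirflowDagRunner into a standard Airflow models.DAG.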
Example #3
 def testTaxiPipelineCheckDagConstruction(self):
     airflow_config = {
         'schedule_interval': None,
         'start_date': datetime.datetime(2019, 1, 1),
     }
     # Build the logical pipeline, pointing every path argument at the test
     # directory, and check that all 9 components are present.
     logical_pipeline = taxi_pipeline_simple._create_pipeline(
         pipeline_name='Test',
         pipeline_root=self._test_dir,
         data_root=self._test_dir,
         module_file=self._test_dir,
         serving_model_dir=self._test_dir,
         metadata_path=self._test_dir)
     self.assertEqual(9, len(logical_pipeline.components))
     # Compile the logical pipeline into an Airflow DAG.
     pipeline = AirflowDagRunner(airflow_config).run(logical_pipeline)
     self.assertIsInstance(pipeline, models.DAG)
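
The tests in Examples #1 and #3 reference self._test_dir, which is provided by the test fixture rather than shown here. A minimal sketch of such a setUp, assuming a tf.test.TestCase-style harness; the class name and scratch-directory layout are illustrative, not the original fixture:

import os
import tempfile

import tensorflow as tf


class TaxiPipelineSimpleTest(tf.test.TestCase):

  def setUp(self):
    super().setUp()
    # Hypothetical fixture: a per-test scratch directory that the examples
    # above pass around as pipeline_root, data_root, serving_model_dir, etc.
    self._test_dir = os.path.join(
        os.environ.get('TEST_TMPDIR', tempfile.gettempdir()),
        self._testMethodName)
    os.makedirs(self._test_dir, exist_ok=True)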