Example #1
import unittest

# NOTE: the other names used below (AIFlowMaster, MasterConfig, DBType,
# ExecuteArgs, get_file_dir, set_project_config_file,
# get_project_description_from, dumps) come from AIFlow test-suite imports
# that are not shown in this snippet.
import ai_flow as af
import flink_ai_flow as faf
import test_util


class TestProjectConfig(unittest.TestCase):
    # Each test starts a local AIFlow master from the master.yaml next to
    # this file and tears it down (including its database) afterwards.
    def setUp(self):
        project_path = get_file_dir(__file__)
        config_file = project_path + '/master.yaml'
        self.master = AIFlowMaster(config_file=config_file)
        self.master.start()

    def tearDown(self):
        self.master.stop()
        self.master._clear_db()

    def test_load_project_config(self):
        project_path = get_file_dir(__file__)
        set_project_config_file(project_path + "/project.yaml")
        project_desc = get_project_description_from(project_path)
        self.assertEqual(project_desc.project_config.get_master_uri(),
                         "localhost:50051")
        self.assertIsNone(
            project_desc.project_config.get('ai_flow config', None))
        self.assertEqual(project_desc.project_config['ai_flow_home'],
                         '/opt/ai_flow')
        self.assertEqual(
            project_desc.project_config['ai_flow_job_master.host'],
            'localhost')
        self.assertEqual(
            project_desc.project_config['ai_flow_job_master.port'], 8081)
        self.assertEqual(project_desc.project_config['ai_flow_conf'],
                         'taskmanager.slot=2')


def run_flink_predict_job():
    # Build a Flink predict step against a saved model and write the result
    # back out. `input_example`, `example_2`, `batch_args_2` and
    # `stream_args_2` are not shown in this snippet and are assumed to be
    # defined elsewhere in the original example.
    model_meta = af.ModelMeta(name="test", model_type="saved_model")
    model_version = af.ModelVersionMeta(version="11111",
                                        model_path="./tmp/saved_model/",
                                        model_metric="./tmp/saved_model/",
                                        model_id=0)
    processed = af.predict(
        input_data_list=[input_example],
        model_info=model_meta,
        model_version_info=model_version,
        executor=faf.flink_executor.FlinkJavaExecutor(
            java_class="com.apache.flink.ai.flow.TestPredict"))

    af.write_example(input_data=processed,
                     example_info=example_2,
                     exec_args=ExecuteArgs(
                         batch_properties=batch_args_2,
                         stream_properties=stream_args_2))

    # Inspect the default graph, compile the project into a workflow and
    # print its first job.
    g = af.default_graph()
    workflow = af.compile_workflow(project_path=test_util.get_project_path())
    print(dumps(list(workflow.jobs.values())[0]))


class TestMasterStartStop(unittest.TestCase):
    # NOTE: the original snippet shows only the method below; the class name
    # here is assumed so the example stays runnable.
    def test_master_start_stop(self):
        # As in the original snippet, `config` is built but the master itself
        # is constructed from the config file provided by test_util.
        config = MasterConfig()
        config.set_db_uri(db_type=DBType.SQLITE, uri="sqlite:///sql.db")
        master = AIFlowMaster(config_file=test_util.get_master_config_file())
        master.start(is_block=False)
        master.stop()


if __name__ == '__main__':
    # Run the Flink predict job once against a locally started master.
    config_file = test_util.get_master_config_file()
    master = AIFlowMaster(config_file=config_file)
    master.start()
    test_util.set_project_config(__file__)
    run_flink_predict_job()
    master.stop()
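
For context, here is a minimal sketch of the project.yaml that test_load_project_config could be asserting against, reconstructed purely from the assertions above; the flat YAML layout and the master_uri key name are assumptions, while the remaining keys and values are taken from the test.

# Hypothetical helper that writes a project.yaml matching the assertions in
# test_load_project_config; the flat layout and the 'master_uri' key name are
# assumptions.
import yaml

project_config = {
    'master_uri': 'localhost:50051',   # assumed key name for get_master_uri()
    'ai_flow_home': '/opt/ai_flow',
    'ai_flow_job_master.host': 'localhost',
    'ai_flow_job_master.port': 8081,
    'ai_flow_conf': 'taskmanager.slot=2',
}

with open('project.yaml', 'w') as f:
    yaml.safe_dump(project_config, f, default_flow_style=False)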