Example #1
0
class TestProjectConfig(unittest.TestCase):
    """Checks that project.yaml values are parsed into the project config."""

    def setUp(self):
        # Boot a master from the master.yaml that sits next to this file.
        base_dir = get_file_dir(__file__)
        self.master = AIFlowMaster(config_file=base_dir + '/master.yaml')
        self.master.start()

    def tearDown(self):
        # Stop the master and wipe its database so tests stay isolated.
        self.master.stop()
        self.master._clear_db()

    def test_load_project_config(self):
        base_dir = get_file_dir(__file__)
        set_project_config_file(base_dir + "/project.yaml")
        cfg = get_project_description_from(base_dir).project_config
        self.assertEqual("localhost:50051", cfg.get_master_uri())
        # A key that is not present must fall back to the supplied default.
        self.assertIsNone(cfg.get('ai_flow config', None))
        self.assertEqual('/opt/ai_flow', cfg['ai_flow_home'])
        self.assertEqual('localhost', cfg['ai_flow_job_master.host'])
        self.assertEqual(8081, cfg['ai_flow_job_master.port'])
        self.assertEqual('taskmanager.slot=2', cfg['ai_flow_conf'])
Example #2
0
 def setUp(self):
     """Start an AIFlowMaster driven by the master.yaml beside this file."""
     master_yaml = get_file_dir(__file__) + '/master.yaml'
     self.master = AIFlowMaster(config_file=master_yaml)
     self.master.start()
Example #3
0
 def setUpClass(cls) -> None:
     """Start one shared master for the class and register project config."""
     cls.master = AIFlowMaster(
         config_file=test_util.get_master_config_file())
     cls.master.start()
     test_util.set_project_config(__file__)
    return os.path.dirname(os.path.dirname(__file__))


def project_config_file():
    """Return the path of the project's ``project.yaml``."""
    base = project_path()
    return base + '/project.yaml'


def master_config_file():
    """Return the path of the project's ``master.yaml``."""
    base = project_path()
    return base + '/master.yaml'


def workflow_config_file():
    """Return the path of the project's ``resources/workflow.yaml``."""
    base = project_path()
    return base + '/resources/workflow.yaml'


# Module-level master shared by the helpers and tests below; note this
# constructs the master at import time from master.yaml.
master = AIFlowMaster(config_file=master_config_file())


def master_port():
    """Return the ``master_port`` entry of the shared master's config."""
    config = master.master_config
    return config.get('master_port')


def deploy_path():
    """Return ``scheduler.properties.airflow_deploy_path`` from the config.

    NOTE(review): assumes 'scheduler' and 'properties' are present —
    ``.get`` on a missing intermediate key would raise AttributeError.
    """
    scheduler_cfg = master.master_config.get('scheduler')
    properties = scheduler_cfg.get('properties')
    return properties.get('airflow_deploy_path')


class BaseETETest(unittest.TestCase):
    """Base class for end-to-end tests that share the module-level master."""
    @classmethod
    def setUpClass(cls) -> None:
        # Start the shared, import-time-constructed master once per class.
        master.start()
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
#
import argparse
from ai_flow.application_master.master import AIFlowMaster
import logging

if __name__ == '__main__':
    # Without configuring logging first, the root logger's default WARNING
    # threshold silently drops the logging.info() call below.
    logging.basicConfig(level=logging.INFO)

    # Parse the required --config flag and start a blocking master with it.
    parser = argparse.ArgumentParser()
    parser.add_argument('--config', required=True, help='master config file')
    args = parser.parse_args()
    logging.info(args.config)
    config_file = args.config
    master = AIFlowMaster(config_file=config_file)
    master.start(is_block=True)
        model_meta = af.ModelMeta(name="test", model_type="saved_model")
        model_version = af.ModelVersionMeta(version="11111",
                                            model_path="./tmp/saved_model/",
                                            model_metric="./tmp/saved_model/",
                                            model_id=0)
        processed = af.predict(
            input_data_list=[input_example],
            model_info=model_meta,
            model_version_info=model_version,
            executor=faf.flink_executor.FlinkJavaExecutor(
                java_class="com.apache.flink.ai.flow.TestPredict"))

        af.write_example(input_data=processed,
                         example_info=example_2,
                         exec_args=ExecuteArgs(
                             batch_properties=batch_args_2,
                             stream_properties=stream_args_2))

    g = af.default_graph()
    workflow = af.compile_workflow(project_path=test_util.get_project_path())
    print(dumps(list(workflow.jobs.values())[0]))


if __name__ == '__main__':
    # Stand up a master from the test config, register this file's project
    # config, run the Flink predict job, then shut the master down.
    config_file = test_util.get_master_config_file()
    master = AIFlowMaster(config_file=config_file)
    master.start()
    test_util.set_project_config(__file__)
    run_flink_predict_job()
    master.stop()
 def test_master_start_stop(self):
     """Smoke test: a master starts non-blocking and stops cleanly."""
     # NOTE(review): this MasterConfig is built and given a SQLite URI but
     # is never passed to the AIFlowMaster below, which loads its config
     # from the file instead — confirm whether these two lines are dead code.
     config = MasterConfig()
     config.set_db_uri(db_type=DBType.SQLITE, uri="sqlite:///sql.db")
     master = AIFlowMaster(config_file=test_util.get_master_config_file())
     master.start(is_block=False)
     master.stop()