def run_submission_with_high_level_api(backend, entry_point, capsys,
                                       expected_result):
    """Submit *entry_point* as a TrainJob and check its captured output.

    Builds a base Docker image tag matching the currently running Python
    interpreter (so the remote environment mirrors the local one), submits
    the job through the given fairing *backend*, then asserts that
    *expected_result* appears in the stdout captured by pytest's *capsys*
    fixture.
    """
    # e.g. "3.7.4" from the running interpreter's (major, minor, micro).
    py_version = ".".join(map(str, sys.version_info[:3]))
    base_image = f"python:{py_version}"

    train_job = TrainJob(
        entry_point=entry_point,
        base_docker_image=base_image,
        docker_registry=DOCKER_REGISTRY,
        backend=backend,
        input_files=["requirements.txt"],
    )
    train_job.submit()

    # The job's stdout is expected to contain the marker string.
    captured = capsys.readouterr()
    assert expected_result in captured.out
# Example no. 2
            local_data_dir="~/tensorflow_datasets")
        model.train()

    else:
        # Remote path: use Kubeflow Fairing to package the training class
        # and run it on GKE instead of training locally.
        GCP_PROJECT = fairing.cloud.gcp.guess_project_name()
        DOCKER_REGISTRY = 'gcr.io/{}/fairing-job'.format(GCP_PROJECT)
        BuildContext = None  # None -> let Fairing pick its default build-context source
        FAIRING_BACKEND = 'KubeflowGKEBackend'
        # Resolve the backend class by name from kubeflow.fairing.backends
        # so the backend choice stays a plain string constant above.
        BackendClass = getattr(
            importlib.import_module('kubeflow.fairing.backends'),
            FAIRING_BACKEND)

        # Pre-downloaded tensorflow_datasets artifacts (IMDB reviews,
        # subwords8k encoding) shipped into the build context so the remote
        # job does not have to re-download the dataset.
        data_files = [
            'tensorflow_datasets/downloads/ai.stanfor.edu_amaas_sentime_aclImdb_v1xA90oY07YfkP66HhdzDg046Ll8Bf3nAIlC6Rkj0WWP4.tar.gz',
            'tensorflow_datasets/downloads/ai.stanfor.edu_amaas_sentime_aclImdb_v1xA90oY07YfkP66HhdzDg046Ll8Bf3nAIlC6Rkj0WWP4.tar.gz.INFO',
            'tensorflow_datasets/imdb_reviews/subwords8k/1.0.0/dataset_info.json',
            'tensorflow_datasets/imdb_reviews/subwords8k/1.0.0/imdb_reviews-test.tfrecord-00000-of-00001',
            'tensorflow_datasets/imdb_reviews/subwords8k/1.0.0/imdb_reviews-train.tfrecord-00000-of-00001',
            'tensorflow_datasets/imdb_reviews/subwords8k/1.0.0/imdb_reviews-unsupervised.tfrecord-00000-of-00001',
            'tensorflow_datasets/imdb_reviews/subwords8k/1.0.0/label.labels.txt',
            'tensorflow_datasets/imdb_reviews/subwords8k/1.0.0/text.text.subwords',
            'requirements.txt'
        ]

        # Submit the training class itself (not a script path) as the job's
        # entry point; the resolved backend builds and launches the image.
        train_job = TrainJob(
            MovieReviewClassification,
            input_files=data_files,
            docker_registry=DOCKER_REGISTRY,
            backend=BackendClass(build_context_source=BuildContext))
        train_job.submit()