def main():
    """Trains a model locally as a smoke test for get_model().

    Loads the train/eval split, fits the model returned by get_model(),
    and prints the evaluation score to stdout.
    """
    train_x, train_y, eval_x, eval_y = load_data()
    # Estimators expect 1-D label arrays; flatten any (n, 1) columns.
    train_y, eval_y = (np.ravel(labels) for labels in (train_y, eval_y))
    # Mimic the CLI argument object the trainer normally receives.
    params = argparse.Namespace(C=1.0)
    model = get_model(params)
    model.fit(train_x, train_y)
    print(model.score(eval_x, eval_y))
def main():
    """Builds the KFP pipeline end-to-end and kicks off a run.

    Generates model artifacts, uploads train/eval and batch-prediction
    data to GCS, wires up the train -> deploy/predict pipeline graph,
    and finally invokes the orchestration entry point.
    """
    config = "config.yaml"
    model = TFModel(config)
    model.generate_files()
    _upload_data_to_gcs(model)
    pipeline = KfpPipeline(model)

    # preprocess and upload dataset to expected location.
    load_data(model.data["train"], model.data["evaluation"])

    # define pipeline structure: deploy and predict both fan out
    # from the training step.
    train_step = pipeline.add_train_component()
    pipeline.add_deploy_component(parent=train_step)
    pipeline.add_predict_component(parent=train_step)
    pipeline.print_structure()
    pipeline.generate_pipeline()

    # Create batch prediction data in GCS. Values are a single
    # already-preprocessed example row.
    sample_rows = [{
        "age": 0.02599666,
        "workclass": 6,
        "education_num": 1.1365801,
        "marital_status": 4,
        "occupation": 0,
        "relationship": 1,
        "race": 4,
        "capital_gain": 0.14693314,
        "capital_loss": -0.21713187,
        "hours_per_week": -0.034039237,
        "native_country": 38,
        "income_bracket": 0,
    }]
    _upload_input_data_to_gcs(model, sample_rows)

    # Run the pipeline. Imported here because the orchestration module
    # depends on the files generated above.
    # pylint: disable=import-outside-toplevel
    from orchestration import pipeline as kfp_pipeline
    kfp_pipeline.main()
def _upload_data_to_gcs(model):
    """Calls the preprocessing fn which uploads train/eval data to GCS."""
    destinations = model.data
    load_data(destinations["train"], destinations["evaluation"])
def _upload_data_to_gcs(model):
    """Runs preprocessing, which writes the train/eval datasets to GCS."""
    train_path, eval_path = model.data["train"], model.data["evaluation"]
    load_data(train_path, eval_path)