Ejemplo n.º 1
0
def submit_run(run_name, job_name, parameter1, parameter2):
    """Build a Databricks SubmitRunOp that triggers an existing job by name.

    The two pipeline parameters are forwarded to the notebook as
    ``param1`` and ``param2``.
    """
    # Collect the notebook arguments first so the op call stays flat.
    notebook_params = {
        "param1": parameter1,
        "param2": parameter2,
    }
    return databricks.SubmitRunOp(
        name="submitrun",
        run_name=run_name,
        job_name=job_name,
        notebook_params=notebook_params,
    )
def submit_run(run_name, cluster_id, parameter):
    """Build a Databricks SubmitRunOp that runs the SparkPi example JAR
    on an already-running cluster identified by *cluster_id*.
    """
    # The JAR must be present on DBFS; the task entry point and its single
    # argument are described by the spark_jar_task spec.
    jar_libraries = [{"jar": "dbfs:/docs/sparkpi.jar"}]
    jar_task = {
        "main_class_name": "org.apache.spark.examples.SparkPi",
        "parameters": [parameter],
    }
    return databricks.SubmitRunOp(
        name="submitrun",
        run_name=run_name,
        existing_cluster_id=cluster_id,
        libraries=jar_libraries,
        spark_jar_task=jar_task,
    )
Ejemplo n.º 3
0
def submit_run(run_name, parameter):
    """Build a Databricks SubmitRunOp that provisions a new cluster and
    runs the SparkPi example JAR on it with a single argument.
    """
    # Ephemeral cluster spec created just for this run.
    cluster_spec = {
        "spark_version": "5.3.x-scala2.11",
        "node_type_id": "Standard_D3_v2",
        "num_workers": 2,
    }
    task_spec = {
        "main_class_name": "org.apache.spark.examples.SparkPi",
        "parameters": [parameter],
    }
    return databricks.SubmitRunOp(
        name="submitrun",
        run_name=run_name,
        new_cluster=cluster_spec,
        libraries=[{"jar": "dbfs:/docs/sparkpi.jar"}],
        spark_jar_task=task_spec,
    )
Ejemplo n.º 4
0
def submit_run(run_name, job_name, parameter):
    """Build a Databricks SubmitRunOp that triggers an existing JAR job
    by name, passing *parameter* through ``jar_params``.
    """
    return databricks.SubmitRunOp(
        name="submitrun",
        run_name=run_name,
        job_name=job_name,
        jar_params=[parameter],
    )