class WordCountPySparkTask(PySparkTask):
    """PySpark task that submits the word-count script.

    Wires a text input and a counters output to the standalone
    ``spark_scripts/word_count.py`` script.
    """

    # Task I/O: input text file and output location for the counts.
    text = parameter.data
    counters = parameter.output

    # Script shipped to the Spark cluster, resolved next to this module.
    python_script = relative_path(__file__, "spark_scripts/word_count.py")

    def application_args(self):
        """Positional CLI arguments passed to the script: input, then output."""
        args = [self.text, self.counters]
        return args
def clean_output(self):
    """Best-effort removal of generated outputs.

    Deletes the local ``data`` folder (next to this file) and ``/tmp/bar``.
    A missing directory is not an error, so ``FileNotFoundError`` is ignored;
    any other failure (e.g. permissions) still propagates, as before.
    """
    from contextlib import suppress

    data_folder = relative_path(__file__, "data")
    # suppress(FileNotFoundError) replaces the two try/except-pass pairs:
    # identical behavior, less boilerplate.
    with suppress(FileNotFoundError):
        shutil.rmtree(data_folder)
    with suppress(FileNotFoundError):
        shutil.rmtree("/tmp/bar")
def test_custom_yaml(self):
    """A pod built from a custom YAML keeps the fields set in that file."""
    overrides = {
        "kubernetes": {
            "pod_yaml": relative_path(__file__, "custom_pod.yaml"),
            "container_tag": "dummy_tag",
            "namespace": "test_namespace",
        }
    }
    with dbnd_config(overrides):
        executed = request_builder.dbnd_run(config_name="gcp_k8s_engine")
        pod_request = executed.run_executor.result.load("result")
    # dnsPolicy comes from custom_pod.yaml and must survive request building.
    assert pod_request["spec"]["dnsPolicy"] == "ClusterFirstWithHostNet"
def _data_for_spark_path(*path):
    """Path under ``data_for_spark_examples``, next to the examples module."""
    # Import kept function-local, as in the original — presumably to avoid
    # importing dbnd_examples at module load time; TODO confirm.
    from dbnd_examples.orchestration.dbnd_spark import read_from_multiple_sources

    anchor = read_from_multiple_sources.__file__
    return relative_path(anchor, "data_for_spark_examples", *path)
from dbnd import relative_path
from dbnd._core.utils.basics.path_utils import abs_join
from targets import target

# Root of the shared dbnd test scenarios, resolved four levels above this file.
_scenarios_path = relative_path(
    __file__, "..", "..", "..", "..", "modules/dbnd/test_dbnd/scenarios"
)


def scenario_path(*path):
    """Absolute path of a file under the shared scenarios directory."""
    return abs_join(_scenarios_path, *path)


def scenario_target(*path):
    """Target wrapping the scenario file at *path*."""
    scenario_file = scenario_path(*path)
    return target(scenario_file)
def _add_local_test_config(calling_file):
    """Apply a per-directory test config, if one sits next to *calling_file*."""
    candidate = relative_path(calling_file, TEST_CONF_FILE_NAME)
    if not os.path.exists(candidate):
        return
    dbnd_config.set_from_config_file(candidate)
def test_notebook(self):
    """Smoke-run the py27 wine-quality decorators notebook."""
    notebook = relative_path(
        wine_quality_decorators_py2.__file__, "predict_wine_quality_py27.ipynb"
    )
    run_test_notebook(notebook)
def test_notebook(self):
    """Smoke-run the wine-quality-as-notebook example."""
    notebook = relative_path(
        wine_quality.__file__, "wine_quality_as_notebook.ipynb"
    )
    run_test_notebook(notebook)
import logging  # was missing: `logger = logging.getLogger(...)` below needs it
import os
from datetime import datetime

from airflow import DAG, settings
from airflow.executors.sequential_executor import SequentialExecutor
from airflow.models import XCom
from airflow.utils.dates import days_ago

from dbnd import relative_path
from dbnd._core.utils.project.project_fs import abs_join
from dbnd._core.utils.timezone import utcnow
from targets import target

logger = logging.getLogger(__name__)

# Root of the dbnd-airflow operator test tree, four levels above this file.
# NOTE(review): mixes a "../.." segment with single ".." segments — presumably
# intentional, but verify the resulting directory.
_lib_dbnd_airflow_operator_test = relative_path(__file__, "../..", "..", "..")


def get_executor_for_test():
    """Prefer dbnd's in-process executor; fall back to Airflow's sequential one.

    The broad ``except Exception`` is deliberate: any import or construction
    failure means the dbnd executor is unavailable, and tests should still run
    with ``SequentialExecutor``.
    """
    try:
        from dbnd_airflow.executors.simple_executor import InProcessExecutor

        return InProcessExecutor()
    except Exception:
        return SequentialExecutor()


def dbnd_airflow_operator_test_path(*path):
    """Absolute path of a file under the operator test tree."""
    return abs_join(_lib_dbnd_airflow_operator_test, *path)
from dbnd import relative_path
from dbnd._core.utils.project.project_fs import abs_join

# Directory of this module; airflow test scenarios are resolved against it.
_airflow_scenarios_default = relative_path(__file__)


def dbnd_airflow_test_scenarios_path(*path):
    """Absolute path of an airflow scenario file relative to this module."""
    return abs_join(_airflow_scenarios_default, *path)
def spark_script(*path):
    """Resolve a spark script location relative to this module's directory."""
    script = relative_path(__file__, *path)
    return script
def cli_scripts(*path):
    """Resolve a CLI script location relative to this module's directory."""
    script = relative_path(__file__, *path)
    return script
def test_notebook(self):
    """Smoke-run the py27 salad example notebook."""
    notebook = relative_path(salad.__file__, "salad_notebook_py27.ipynb")
    run_test_notebook(notebook)