def _define_basic_job(launch_initial, launch_final):
    """Build a three-op diamond job (op1/op2 feeding combine).

    Each flag selects whether the corresponding ops run via the local
    external step launcher or a mocked resource stand-in.
    """
    initial_launcher = (
        local_external_step_launcher
        if launch_initial
        else ResourceDefinition.mock_resource()
    )
    final_launcher = (
        local_external_step_launcher
        if launch_final
        else ResourceDefinition.mock_resource()
    )

    @op(required_resource_keys={"initial_launcher"})
    def op1():
        return 1

    @op(required_resource_keys={"initial_launcher"})
    def op2():
        return 2

    @op(required_resource_keys={"final_launcher"})
    def combine(a, b):
        return a + b

    @job(
        resource_defs={
            "initial_launcher": initial_launcher,
            "final_launcher": final_launcher,
            "io_manager": fs_io_manager,
        }
    )
    def my_job():
        combine(op1(), op2())

    return my_job
def _define_dynamic_job(launch_initial, launch_final):
    """Build a fan-out/fan-in job: dynamic_outs -> mapped increment -> total.

    Each flag selects whether the corresponding ops run via the local
    external step launcher or a mocked resource stand-in.
    """
    from typing import List

    initial_launcher = (
        local_external_step_launcher
        if launch_initial
        else ResourceDefinition.mock_resource()
    )
    final_launcher = (
        local_external_step_launcher
        if launch_final
        else ResourceDefinition.mock_resource()
    )

    @op(required_resource_keys={"initial_launcher"}, out=DynamicOut(int))
    def dynamic_outs():
        # Emit three dynamic outputs keyed num_0..num_2.
        for i in range(0, 3):
            yield DynamicOutput(value=i, mapping_key=f"num_{i}")

    @op
    def increment(i):
        return i + 1

    @op(required_resource_keys={"final_launcher"})
    def total(ins: List[int]):
        return sum(ins)

    @job(
        resource_defs={
            "initial_launcher": initial_launcher,
            "final_launcher": final_launcher,
            "io_manager": fs_io_manager,
        }
    )
    def my_job():
        all_incs = dynamic_outs().map(increment)
        total(all_incs.collect())

    return my_job
def test_local():
    # Since we have access to the computation graph independent of the set of
    # resources, we can test it locally by swapping in a mocked "api" resource.
    result = download.execute_in_process(
        resources={"api": ResourceDefinition.mock_resource()}
    )
    assert result.success
def test_mock_resource():
    """Verify that a mocked resource is materialized and visible to a solid."""
    called = {}

    @solid(required_resource_keys={"test_mock"})
    def solid_test_mock(context):
        # The mock resource should be initialized and available on the context.
        assert context.resources.test_mock is not None
        called["yup"] = True

    pipeline = PipelineDefinition(
        name="test_mock_resource",
        solid_defs=[solid_test_mock],
        mode_defs=[
            ModeDefinition(
                resource_defs={"test_mock": ResourceDefinition.mock_resource()}
            )
        ],
    )

    result = execute_pipeline(pipeline)
    assert result.success
    assert called["yup"]
"""isort:skip_file""" # start_define_graph from dagster import graph, op @op(required_resource_keys={"server"}) def interact_with_server(context): context.resources.server.ping_server() @graph def do_stuff(): interact_with_server() # end_define_graph # start_define_jobs from dagster import ResourceDefinition prod_server = ResourceDefinition.mock_resource() local_server = ResourceDefinition.mock_resource() prod_job = do_stuff.to_job(resource_defs={"server": prod_server}, name="do_stuff_prod") local_job = do_stuff.to_job(resource_defs={"local": local_server}, name="do_stuff_local") # end_define_jobs
prod_mode = ModeDefinition(name="prod", resource_defs={ "beam_runner": preconfigure_resource_for_mode( dataflow_beam_runner, "prod"), "refresh_directory": refresh_directory, "outfiles_writer": outfiles_writer }) test_mode = ModeDefinition(name="test", resource_defs={ "beam_runner": ResourceDefinition.mock_resource(), "refresh_directory": refresh_directory, "outfiles_writer": ResourceDefinition.mock_resource() }) @pipeline(mode_defs=[local_mode, dev_mode, prod_mode, test_mode]) def refresh_data_all() -> None: collected_outputs = [ hles_transform_records(hles_extract_records()), cslb_transform_records(cslb_extract_records()), env_transform_records(env_extract_records()), sample_transform_records(sample_extract_records()), eols_transform_records(eols_extract_records())
# isort: skip_file # pylint: disable=unused-argument # pylint: disable=reimported from dagster import ResourceDefinition api_client = ResourceDefinition.mock_resource() def process(data): return data # start_test_before_marker from dagster import op @op def get_data_without_resource(context): dummy_data = [1, 2, 3] # Do not call external apis in tests # return call_api() return dummy_data # end_test_before_marker # start_test_after_marker from dagster import op, graph @op(required_resource_keys={"api"})
# pylint: disable=unused-argument from dagster import ModeDefinition, ResourceDefinition, pipeline, solid from dagster.core.execution.api import execute_pipeline api_client = ResourceDefinition.mock_resource() def process(data): return data # start_mode_test_before_marker @solid def get_data_without_resource(context): dummy_data = [1, 2, 3] # Do not call external apis in tests # return call_api() return dummy_data # end_mode_test_before_marker # start_mode_test_after_marker @solid(required_resource_keys={"api"}) def get_data(context): return context.resources.api.call() @solid
@solid(required_resource_keys={"s3"}) def store_data_in_s3(context, json_responses: dict): for ticker_symbol, response in json_responses.items(): obj = context.resources.s3.Object( "dagster-stock-data", "{date}/{ticker_symbol}.json".format(date="123", ticker_symbol=ticker_symbol), ) obj.put(Body=json.dumps(response)) local_mode = ModeDefinition( name="local_mode", resource_defs={ "slack": mock_slack_resource, "s3": ResourceDefinition.mock_resource() }, ) production_mode = ModeDefinition(name="production_mode", resource_defs={ "slack": slack_resource, "s3": s3_resource }) solid_config = { "solids": { "query_stock_market_data": { "config": { "portfolio": ["AAPL"] } }
context.resources.slack.chat_postMessage(channel="#portfolio-management", text=message) @solid(required_resource_keys={"s3"}) def store_data_in_s3(context, json_responses: dict): for ticker_symbol, response in json_responses.items(): obj = context.resources.s3.Object( "dagster-stock-data", "{date}/{ticker_symbol}.json".format(date="123", ticker_symbol=ticker_symbol), ) obj.put(Body=json.dumps(response)) local_mode = ModeDefinition( name="local_mode", resource_defs={"slack": mock_slack_resource, "s3": ResourceDefinition.mock_resource()}, ) production_mode = ModeDefinition( name="production_mode", resource_defs={"slack": slack_resource, "s3": s3_resource} ) solid_config = { "solids": {"query_stock_market_data": {"config": {"portfolio": ["AAPL"]}}}, } @pipeline( mode_defs=[local_mode, production_mode], preset_defs=[ PresetDefinition(name="local", mode="local_mode", run_config=solid_config), PresetDefinition(
@resource
@contextmanager
def my_cm_resource(_):
    # Context-manager resource: yields the value, then tears down on exit.
    yield "foo"


def test_cm_resource():
    # The resource function can be entered directly as a context manager.
    with my_cm_resource(None) as initialized_resource:
        assert initialized_resource == "foo"


# end_test_cm_resource

database_resource = ResourceDefinition.mock_resource()
database_resource_a = ResourceDefinition.mock_resource()
database_resource_b = ResourceDefinition.mock_resource()

# start_job_example
from dagster import job


@job(resource_defs={"database": database_resource})
def do_database_stuff_job():
    op_requires_resources()


# end_job_example