Example #1
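A test that snapshots a config dict of Field definitions and checks that required flags, descriptions, and the JSON-encoded default value (read back with deserialize_value) are preserved.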
def test_field_things():
    dict_snap = snap_from_dagster_type({
        "req": int,
        "opt": Field(int, is_required=False),
        "opt_with_default": Field(int, is_required=False, default_value=2),
        "req_with_desc": Field(int, description="A desc"),
    })

    assert dict_snap.fields and len(dict_snap.fields) == 4

    field_snap_dict = {
        field_snap.name: field_snap
        for field_snap in dict_snap.fields
    }

    assert field_snap_dict["req"].is_required is True
    assert field_snap_dict["req"].description is None
    assert field_snap_dict["opt"].is_required is False
    assert field_snap_dict["opt"].default_provided is False
    assert field_snap_dict["opt"].default_value_as_json_str is None
    assert field_snap_dict["opt_with_default"].is_required is False
    assert field_snap_dict["opt_with_default"].default_provided is True
    assert deserialize_value(
        field_snap_dict["opt_with_default"].default_value_as_json_str) == 2

    assert field_snap_dict["req_with_desc"].is_required is True
    assert field_snap_dict["req_with_desc"].description == "A desc"
Example #2
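Reads a snapshot file of serialized events line by line and asserts that deserialize_value can still reconstruct each one.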
def test_dead_events():
    snapshot = path.join(path.dirname(path.realpath(__file__)),
                         'dead_events.txt')
    with open(snapshot, 'r') as fd:
        objs = []
        for line in fd.readlines():
            obj = deserialize_value(line)
            assert obj is not None
            objs.append(obj)

    assert len(objs) == 6
Example #3
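Rebuilds Field objects from a config type snapshot; stored defaults are JSON strings, so deserialize_value recovers the original value whenever default_provided is set.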
def _construct_fields(config_type_snap, config_snap_map):
    return {
        field.name: Field(
            construct_config_type_from_snap(config_snap_map[field.type_key],
                                            config_snap_map),
            description=field.description,
            is_required=field.is_required,
            default_value=deserialize_value(field.default_value_as_json_str)
            if field.default_provided else FIELD_NO_DEFAULT_PROVIDED,
        )
        for field in config_type_snap.fields
    }
Example #4
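A type-annotated version of the same helper, adding check.not_none and cast so the calls type-check before deserialize_value is applied to the serialized default.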
def _construct_fields(
    config_type_snap: ConfigTypeSnap,
    config_snap_map: Dict[str, ConfigTypeSnap],
) -> Dict[str, Field]:
    fields = check.not_none(config_type_snap.fields)
    return {
        cast(str, field.name): Field(
            construct_config_type_from_snap(config_snap_map[field.type_key],
                                            config_snap_map),
            description=field.description,
            is_required=field.is_required,
            default_value=deserialize_value(
                cast(str, field.default_value_as_json_str))
            if field.default_provided else FIELD_NO_DEFAULT_PROVIDED,
        )
        for field in fields
    }
Example #5
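A step launcher method: it pickles a step run ref, executes the step in a local subprocess, then reads the pickled events file back and deserializes it before forwarding each event to the instance.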
    def launch_step(
        self,
        step_context: StepExecutionContext,
        prior_attempts_count: int,
    ) -> Iterator[DagsterEvent]:
        step_run_ref = step_context_to_step_run_ref(step_context,
                                                    prior_attempts_count)
        run_id = step_context.pipeline_run.run_id

        step_run_dir = os.path.join(self.scratch_dir, run_id,
                                    step_run_ref.step_key)
        os.makedirs(step_run_dir)

        step_run_ref_file_path = os.path.join(step_run_dir,
                                              PICKLED_STEP_RUN_REF_FILE_NAME)
        with open(step_run_ref_file_path, "wb") as step_pickle_file:
            pickle.dump(step_run_ref, step_pickle_file)

        command_tokens = [
            sys.executable,
            "-m",
            "dagster.core.execution.plan.local_external_step_main",
            step_run_ref_file_path,
        ]
        # If this is being called within a `capture_interrupts` context, allow interrupts
        # while waiting for the subprocess to complete, so that we can terminate slow or
        # hanging steps
        with raise_execution_interrupts():
            subprocess.call(command_tokens,
                            stdout=sys.stdout,
                            stderr=sys.stderr)

        events_file_path = os.path.join(step_run_dir, PICKLED_EVENTS_FILE_NAME)
        file_manager = LocalFileManager(".")
        events_file_handle = LocalFileHandle(events_file_path)
        events_data = file_manager.read_data(events_file_handle)
        all_events = deserialize_value(pickle.loads(events_data))

        for event in all_events:
            # write each pickled event from the external instance to the local instance
            step_context.instance.handle_new_event(event)
            if event.is_dagster_event:
                yield event.dagster_event
Example #6
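Reads pickled, serialized step records from a file via a Databricks client, returning an empty list when nothing was written.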
def _get_step_records():
    serialized_records = self.databricks_runner.client.read_file(path)
    if not serialized_records:
        return []
    return deserialize_value(pickle.loads(serialized_records))
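The writing side is not shown here; read together with Example #5, the pattern implies the producer serializes the records first and then pickles the resulting string. A hypothetical sketch of that counterpart (encode_step_records and the import path are assumptions, not Dagster API):

import pickle

from dagster.serdes import serialize_value


def encode_step_records(records):
    # Serialize with Dagster's serdes, then pickle the resulting string so a
    # reader can recover the records via deserialize_value(pickle.loads(...)).
    return pickle.dumps(serialize_value(records))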
Example #7
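A round-trip test confirming that empty sets and frozensets survive serialize_value followed by deserialize_value.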
def test_deserialize_empty_set():
    assert set() == deserialize_value(serialize_value(set()))
    assert frozenset() == deserialize_value(serialize_value(frozenset()))
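For reference, a minimal round-trip sketch in the same spirit; the import path is an assumption and varies by Dagster release (dagster.serdes in the older API these snippets use, dagster._serdes in newer ones):

from dagster.serdes import deserialize_value, serialize_value


def test_round_trip_plain_values():
    # Plain JSON-compatible values are expected to come back unchanged.
    payload = {"foo": "bar", "nums": [1, 2, 3]}
    assert deserialize_value(serialize_value(payload)) == payload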
Example #8
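Deserializing a plain JSON string yields an ordinary dict. The second snippet applies the same unpickle-then-deserialize pattern to event data fetched from a Databricks client.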
def test_deserialize_value_ok():
    unpacked_tuple = deserialize_value('{"foo": "bar"}')
    assert unpacked_tuple
    assert unpacked_tuple["foo"] == "bar"

def get_step_events(self, run_id, step_key):
    path = self._dbfs_path(run_id, step_key, PICKLED_EVENTS_FILE_NAME)
    events_data = self.databricks_runner.client.read_file(path)
    return deserialize_value(pickle.loads(events_data))