class StructuredDataSchema(ApiStrictSchema):
    type = fields.String(allow_none=True)
    columns_names = fields.List(
        fields.String(), load_from="columns", dump_to="columns", allow_none=True
    )
    columns_types = fields.Dict(load_from="dtypes", dump_to="dtypes", allow_none=True)
    # Shape: tuple of ints (records, columns). fields.Tuple() was only added in
    # marshmallow 3.0.0, so a List is used here.
    shape = fields.List(fields.Integer(allow_none=True), allow_none=True)
    byte_size = fields.Integer(load_from="size.bytes", dump_to="size.bytes", allow_none=True)

    @pre_load
    def pre_load(self, data: dict) -> dict:
        # Support Snowflake & PostgreSQL data_schemas sent by SDKs older than
        # dbnd-sdk-0.61.0, which used the "column_types" key.
        columns_types = data.get("column_types")
        if columns_types:
            data["columns_types"] = columns_types
        return data

    @post_load
    def make_object(self, data) -> DataSchemaArgs:
        return DataSchemaArgs(**data)

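# A minimal round-trip sketch, not part of the original module: it assumes
# marshmallow 2.x semantics (which match the load_from/dump_to keywords above)
# and that DataSchemaArgs accepts these attributes as keyword arguments.
def _structured_data_schema_example():
    args = DataSchemaArgs(
        type="pandas.DataFrame",
        columns_names=["id", "value"],
        columns_types={"id": "int64", "value": "float64"},
        shape=[100, 2],
        byte_size=1600,
    )
    # dump_to renames the attributes on the wire, e.g.
    # {"type": ..., "columns": [...], "dtypes": {...}, "shape": [...], "size.bytes": 1600}
    wire = StructuredDataSchema().dump(args).data

    # Pre-0.61.0 SDKs sent "column_types"; pre_load copies it to
    # "columns_types", so loading still yields a DataSchemaArgs.
    legacy = dict(wire)
    legacy["column_types"] = legacy.pop("dtypes")
    restored = StructuredDataSchema().load(legacy).data
    return wire, restored
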
class AirflowServerInfoSchema(ApiStrictSchema):
    airflow_version = fields.String(allow_none=True)
    airflow_export_version = fields.String(allow_none=True)
    airflow_monitor_version = fields.String(allow_none=True)
    last_sync_time = fields.DateTime(allow_none=True)
    monitor_error_message = fields.String(allow_none=True)
    synced_to = fields.DateTime(allow_none=True)
    api_mode = fields.String(allow_none=True)
    sync_interval = fields.Integer(allow_none=True)
    is_sync_enabled = fields.Boolean(allow_none=True)
    system_alert_definitions = fields.Dict()
    fetcher = fields.String(allow_none=True)
    composer_client_id = fields.String(allow_none=True)
    dag_ids = fields.String(allow_none=True)
    base_url = fields.String()
    external_url = fields.String(allow_none=True)
    source_instance_uid = fields.String(allow_none=True)
    tracking_source_uid = fields.String()
    airflow_instance_uid = fields.String(allow_none=True)  # TODO_API: deprecate
    name = fields.String(allow_none=True)
    env = fields.String(allow_none=True)
    monitor_status = fields.String(allow_none=True)
    monitor_config = fields.Nested(MonitorConfigSchema, allow_none=True)
    airflow_environment = fields.String(allow_none=True)
    last_seen_dag_run_id = fields.Integer(allow_none=True)
    last_seen_log_id = fields.Integer(allow_none=True)

    @post_load
    def make_object(self, data, **kwargs):
        return AirflowServerInfo(**data)

class JobSchemaV2(ApiStrictSchema):
    id = fields.Int()
    name = fields.Str()
    user = fields.Str()
    reported_by_user = fields.Str()
    is_archived = fields.Boolean()
    ui_hidden = fields.Boolean()
    is_airflow_synced = fields.Boolean()
    project_id = fields.Int()
    project_name = fields.Str()

    # computed
    run_states = fields.Dict()
    source_link = fields.Str()

    # joined
    latest_run_start_time = fields.DateTime()
    latest_run_state = fields.Str()
    latest_run_uid = fields.UUID()
    latest_run_root_task_run_uid = fields.UUID()
    latest_run_trigger = fields.Str()
    latest_run_env = fields.Str()
    source_instance_name = fields.Str()
    source_type = fields.Str()
    airflow_instance_name = fields.Str()  # TODO_SERVER: API: Deprecate
    scheduled_job_count = fields.Number()

class AirflowServerInfoSchema(_ApiCallSchema):
    base_url = fields.String()
    external_url = fields.String(allow_none=True)
    airflow_instance_uid = fields.String(allow_none=True)
    airflow_version = fields.String(allow_none=True)
    airflow_export_version = fields.String(allow_none=True)
    airflow_monitor_version = fields.String(allow_none=True)
    dags_path = fields.String(allow_none=True)
    logs_path = fields.String(allow_none=True)
    last_sync_time = fields.DateTime(allow_none=True)
    monitor_status = fields.String(allow_none=True)
    monitor_error_message = fields.String(allow_none=True)
    monitor_start_time = fields.DateTime(allow_none=True)
    synced_from = fields.DateTime(allow_none=True)
    synced_to = fields.DateTime(allow_none=True)
    api_mode = fields.String(allow_none=True)
    sync_interval = fields.Integer(allow_none=True)
    is_sync_enabled = fields.Boolean(allow_none=True)
    fetcher = fields.String(allow_none=True)
    composer_client_id = fields.String(allow_none=True)
    active_dags = fields.Dict(allow_none=True)
    name = fields.String(allow_none=True)
    env = fields.String(allow_none=True)
    include_logs = fields.Boolean(allow_none=True)
    include_task_args = fields.Boolean(allow_none=True)
    fetch_quantity = fields.Integer(allow_none=True)
    oldest_incomplete_data_in_days = fields.Integer(allow_none=True)

    @post_load
    def make_object(self, data, **kwargs):
        return AirflowServerInfo(**data)

class DagRunSchema(ApiStrictSchema):
    dag_id = fields.String(allow_none=True)
    run_id = fields.String(required=False)
    dagrun_id = fields.Integer()
    start_date = fields.DateTime(allow_none=True)
    state = fields.String()
    end_date = fields.DateTime(allow_none=True)
    execution_date = fields.DateTime(allow_none=True)
    task_args = fields.Dict()

class MLSaveModelSchema(_ApiCallSchema):
    model = fields.Dict()  # pickle
    job_name = fields.String()
    is_enabled = fields.Boolean(allow_none=True)
    data_set = fields.String(allow_none=True)  # df?

    @post_load
    def make_object(self, data, **kwargs):
        return MLSaveModel(**data)

class TaskInstanceSchema(ApiStrictSchema):
    execution_date = fields.DateTime()
    dag_id = fields.String()
    state = fields.String(allow_none=True)
    try_number = fields.Integer()
    task_id = fields.String()
    start_date = fields.DateTime(allow_none=True)
    end_date = fields.DateTime(allow_none=True)
    log_body = fields.String(allow_none=True)
    xcom_dict = fields.Dict()

class TaskRunAttemptSchema(ApiStrictSchema):
    task_run_uid = fields.UUID()
    task_run_attempt_uid = fields.UUID()
    state = EnumField(TaskRunState, allow_none=True)
    timestamp = fields.DateTime(allow_none=True)
    first_error = fields.Nested(ErrorInfoSchema, allow_none=True)
    latest_error = fields.Nested(ErrorInfoSchema, allow_none=True)
    attempt_number = fields.Number(allow_none=True)
    source = fields.Str(allow_none=True)
    start_date = fields.DateTime(allow_none=True)
    end_date = fields.DateTime(allow_none=True)
    external_links_dict = fields.Dict(allow_none=True)

class MetricSchema(ApiObjectSchema):
    key = fields.String()
    value = fields.Raw(allow_none=True)
    value_str = fields.String(allow_none=True)
    value_json = fields.Dict(allow_none=True)
    value_int = fields.Integer(allow_none=True)
    value_float = fields.Float(allow_none=True)
    timestamp = fields.DateTime()

    @post_load
    def make_object(self, data, **kwargs):
        return Metric(**data)

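# Illustrative load, not from the original module: with marshmallow 2.x,
# load() returns an UnmarshalResult whose .data attribute is the Metric
# built by make_object above (assuming Metric accepts these keyword args).
def _metric_schema_example():
    payload = {
        "key": "rows_written",
        "value": 1024,
        "value_int": 1024,
        "timestamp": "2021-06-01T12:00:00+00:00",
    }
    return MetricSchema().load(payload).data  # -> Metric(key="rows_written", ...)
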
class TaskSchema(ApiStrictSchema):
    upstream_task_ids = fields.List(fields.String())
    downstream_task_ids = fields.List(fields.String())
    task_type = fields.String()
    task_source_code = fields.String(allow_none=True)
    task_source_hash = fields.String(allow_none=True)
    task_module_code = fields.String(allow_none=True)
    module_source_hash = fields.String(allow_none=True)
    dag_id = fields.String()
    task_id = fields.String()
    retries = fields.Integer()
    command = fields.String(allow_none=True)
    task_args = fields.Dict()

class TaskRunAttemptUpdateArgsSchema(ApiObjectSchema):
    task_run_uid = fields.UUID()
    task_run_attempt_uid = fields.UUID()
    state = EnumField(TaskRunState, allow_none=True)
    timestamp = fields.DateTime(allow_none=True)
    error = fields.Nested(ErrorInfoSchema, allow_none=True)
    attempt_number = fields.Number(allow_none=True)
    source = fields.Str(allow_none=True)
    start_date = fields.DateTime(allow_none=True)
    external_links_dict = fields.Dict(allow_none=True)

    @post_load
    def make_object(self, data, **kwargs):
        return TaskRunAttemptUpdateArgs(**data)

class TaskRunInfoSchema(ApiObjectSchema):
    task_run_uid = fields.UUID()
    task_run_attempt_uid = fields.UUID()
    task_definition_uid = fields.UUID()
    run_uid = fields.UUID()
    task_id = fields.String()
    task_signature = fields.String()
    task_signature_source = fields.String()
    task_af_id = fields.String()
    execution_date = fields.DateTime()
    name = fields.String()
    env = fields.String()
    command_line = fields.String()
    functional_call = fields.String()
    has_downstreams = fields.Boolean()
    has_upstreams = fields.Boolean()
    is_reused = fields.Boolean()
    is_dynamic = fields.Boolean()
    is_system = fields.Boolean()
    is_skipped = fields.Boolean()
    is_root = fields.Boolean()
    output_signature = fields.String()
    state = EnumField(TaskRunState)
    target_date = fields.Date(allow_none=True)
    log_local = fields.String(allow_none=True)
    log_remote = fields.String(allow_none=True)
    version = fields.String()
    task_run_params = fields.Nested(TaskRunParamSchema, many=True)
    external_links = fields.Dict(allow_none=True)

    @post_load
    def make_task_run(self, data, **kwargs):
        return TaskRunInfo(**data)

class RunInfoSchema(ApiStrictSchema):
    root_run_uid = fields.UUID()
    run_uid = fields.UUID()
    job_name = fields.String()
    project_name = fields.String(allow_none=True)
    user = fields.String()
    name = fields.String()
    description = fields.String(allow_none=True)
    state = EnumField(RunState)
    start_time = fields.DateTime()
    end_time = fields.DateTime(allow_none=True)

    # deprecate
    dag_id = fields.String()
    cmd_name = fields.String(allow_none=True)
    execution_date = fields.DateTime()

    # move to task
    target_date = fields.Date(allow_none=True)
    version = fields.String(allow_none=True)

    driver_name = fields.String()
    is_archived = fields.Boolean()
    env_name = fields.String(allow_none=True)
    cloud_type = fields.String()
    trigger = fields.String()
    task_executor = fields.String(allow_none=True)
    root_run = fields.Nested(RootRunInfoSchema)
    scheduled_run = fields.Nested(ScheduledRunInfoSchema, allow_none=True)
    sends_heartbeat = fields.Boolean(default=False, allow_none=True)
    scheduled_job_name = fields.String(allow_none=True)
    scheduled_date = fields.DateTime(allow_none=True)
    external_links = fields.Dict(allow_none=True)

    @post_load
    def make_run_info(self, data, **kwargs):
        return _as_dotted_dict(**data)

class JobSchemaV2(ApiObjectSchema):
    id = fields.Int()
    name = fields.Str()
    user = fields.Str()
    ui_hidden = fields.Boolean()
    is_airflow_synced = fields.Boolean()

    # computed
    run_states = fields.Dict()
    airflow_link = fields.Str()

    # joined
    latest_run_start_time = fields.DateTime()
    latest_run_state = fields.Str()
    latest_run_uid = fields.UUID()
    latest_run_root_task_run_uid = fields.UUID()
    latest_run_trigger = fields.Str()
    latest_run_env = fields.Str()
    scheduled_job_count = fields.Number()

class DagSchema(ApiStrictSchema):
    description = fields.String()
    root_task_ids = fields.List(fields.String())
    tasks = fields.Nested(TaskSchema, many=True)
    owner = fields.String()
    dag_id = fields.String()
    schedule_interval = fields.String()
    catchup = fields.Boolean(allow_none=True)
    start_date = fields.DateTime(allow_none=True)
    end_date = fields.DateTime(allow_none=True)
    is_committed = fields.Boolean()
    git_commit = fields.String()
    dag_folder = fields.String()
    hostname = fields.String()
    source_code = fields.String(allow_none=True)
    module_source_hash = fields.String(allow_none=True)
    is_subdag = fields.Boolean()
    tags = fields.List(fields.String(), allow_none=True)
    task_type = fields.String()
    task_args = fields.Dict()
    is_active = fields.Boolean(allow_none=True)
    is_paused = fields.Boolean(allow_none=True)

class SaveExternalLinksSchema(_ApiCallSchema):
    task_run_attempt_uid = fields.UUID(required=True)
    # NOTE: the name/url kwargs end up as field metadata only; marshmallow 2.x
    # fields.Dict does not validate key or value types.
    external_links_dict = fields.Dict(
        name=fields.Str(), url=fields.Str(), required=True
    )

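# Hypothetical request body accepted by the schema above; the link name and
# URL are illustrative, and (per the note above) the dict contents are not
# type-checked.
SAVE_LINKS_EXAMPLE = {
    "task_run_attempt_uid": "8e3f6a52-0c4e-4a3d-9c9d-0d1f2a3b4c5d",
    "external_links_dict": {"Spark UI": "http://spark-master:8080/app/123"},
}
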
class LogDataframeHistogramSchema(LogTargetSchema):
    histograms = fields.Dict()
    descriptive_stats = fields.Dict()
    timestamp = fields.DateTime()

class AlertDefsSchema(ApiStrictSchema):
    severity = fields.Str(required=True)
    type = fields.Str(required=True)
    user_metric = fields.Str()
    operator = fields.Str()
    is_str_value = fields.Bool()

    created_at = fields.DateTime()
    scheduled_job_name = fields.Str(attribute="scheduled_job.name")
    source_instance_name = fields.Method("get_tracking_source_name")
    env = fields.Method("get_tracking_source_env")
    # TODO_CORE: API: Deprecate airflow_server_info
    airflow_instance_name = fields.Method("get_tracking_source_name")
    project_id = fields.Int(attribute="job.project_id")
    project_name = fields.Str(attribute="job.project.name")
    alert_on_historical_runs = fields.Bool()
    group_uid = fields.Str(allow_none=True, load_from="alert_group_uid")
    root_group_uid = fields.Str(allow_none=True, load_from="alert_root_group_uid")

    uid = fields.Str(allow_none=True)
    value = fields.Str(allow_none=True)
    job_id = fields.Int(allow_none=True)
    summary = fields.Str(allow_none=True)
    job_name = fields.Str(attribute="job.name", allow_none=True)
    task_repr = fields.Str(allow_none=True)
    task_name = fields.Str(allow_none=True)
    custom_name = fields.Str(allow_none=True)
    original_uid = fields.Str(allow_none=True)
    advanced_json = fields.Str(allow_none=True)
    scheduled_job_uid = fields.Str(allow_none=True)
    custom_description = fields.Str(allow_none=True)
    ml_alert = fields.Nested(MLAlert, allow_none=True)

    # Fields for DatasetSlaAlert/DatasetSlaAdvancedAlert alerts
    # --------------------------------------
    seconds_delta = fields.Int(allow_none=True)  # converted to datetime.timedelta
    dataset_partial_name = fields.Str(allow_none=True)
    datasets_uids = fields.List(fields.Str(), allow_none=True)

    # Fields for OperationColumnStatAdvancedAlert alerts
    # --------------------------------------
    dataset_uid = fields.Str(allow_none=True)
    # Operation type (e.g. "read", "write"; None = any) to filter stats by
    operation_type = fields.Str(allow_none=True)
    # Type of MetricRule, found in dbnd_web. Used to build advanced_json
    metrics_rules = fields.List(fields.Dict(), allow_none=True)

    # Used only by the UI
    affected_datasets = fields.List(fields.Dict(), allow_none=True, dump_only=True)

    assigned_jobs = fields.Method(serialize="get_assigned_jobs", dump_only=True)
    is_system = fields.Function(
        lambda alert_def: alert_def.owner == "system", dump_only=True
    )

    def get_tracking_source_name(self, obj):
        return self._get_tracking_source_instance(obj).name

    def get_tracking_source_env(self, obj):
        return self._get_tracking_source_instance(obj).env

    def _get_tracking_source_instance(self, obj):
        if obj.job:
            return obj.job.tracking_source
        return obj.tracking_source

    @pre_load
    def prepare(self, data: dict, **kwargs):
        value = data.get("value", None)
        if value is not None:
            data["value"] = str(data["value"])
        return data

    def get_assigned_jobs(self, alert_def):
        self_job = (alert_def.job_id, alert_def.job_name)
        sub_alerts_jobs = (
            (sub_alert.job_id, sub_alert.job_name)
            for sub_alert in alert_def.sub_alert_definitions
        )
        alert_jobs = chain(sub_alerts_jobs, [self_job])
        # Drop the (None, None) placeholder and de-duplicate job pairs
        alert_jobs = set(filter(lambda job: job != (None, None), alert_jobs))
        serialized_assigned_jobs = [
            {"job_id": job_id, "job_name": job_name}
            for job_id, job_name in alert_jobs
        ]
        return serialized_assigned_jobs

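# A minimal sketch of get_assigned_jobs' de-duplication; the namedtuple stands
# in for the real alert-definition model, which this snippet does not define.
from collections import namedtuple

_FakeAlert = namedtuple("_FakeAlert", "job_id job_name sub_alert_definitions")


def _assigned_jobs_example():
    sub_a = _FakeAlert(2, "export_job", [])
    sub_b = _FakeAlert(1, "ingest_job", [])  # duplicates the parent's job
    alert = _FakeAlert(1, "ingest_job", [sub_a, sub_b])
    # -> [{"job_id": 1, "job_name": "ingest_job"},
    #     {"job_id": 2, "job_name": "export_job"}] in no guaranteed order;
    # the duplicate pair appears once and (None, None) entries are dropped.
    return AlertDefsSchema().get_assigned_jobs(alert)
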
class AirflowExportMetaSchema(ApiStrictSchema):
    airflow_version = fields.String()
    plugin_version = fields.String()
    request_args = fields.Dict()
    metrics = fields.Nested(MetricsSchema)

class MetricsSchema(ApiStrictSchema):
    performance = fields.Dict()
    sizes = fields.Dict()