class JobSchemaV2(ApiStrictSchema):
    """Job record for the v2 API: stored columns plus computed/joined run info."""

    id = fields.Int()
    name = fields.Str()
    user = fields.Str()
    reported_by_user = fields.Str()
    is_archived = fields.Boolean()
    ui_hidden = fields.Boolean()
    is_airflow_synced = fields.Boolean()
    project_id = fields.Int()
    project_name = fields.Str()

    # computed
    run_states = fields.Dict()
    source_link = fields.Str()

    # joined (denormalized from the job's latest run / source records)
    latest_run_start_time = fields.DateTime()
    latest_run_state = fields.Str()
    latest_run_uid = fields.UUID()
    latest_run_root_task_run_uid = fields.UUID()
    latest_run_trigger = fields.Str()
    latest_run_env = fields.Str()
    source_instance_name = fields.Str()
    source_type = fields.Str()
    airflow_instance_name = fields.Str()

    # TODO_SERVER: API: Deprecate
    scheduled_job_count = fields.Number()
class ScheduledJobSchemaV2(Schema):
    """Scheduled-job row for the v2 API.

    Most fields map onto the ``DbndScheduledJob`` model via ``attribute``;
    the ``last_*`` / ``is_airflow_synced`` fields are dump-only values
    joined in from the run tables.
    """

    class Meta:
        # marshmallow 2.x-style option: raise on validation errors
        # instead of returning them — NOTE(review): confirm marshmallow
        # version; Meta.strict was removed in marshmallow 3.
        strict = True

    uid = fields.Str(attribute="DbndScheduledJob.uid", allow_none=True)
    name = fields.Str(attribute="DbndScheduledJob.name", required=True)
    cmd = fields.Str(attribute="DbndScheduledJob.cmd", required=True)
    schedule_interval = fields.Str(
        attribute="DbndScheduledJob.schedule_interval", required=True)
    start_date = fields.DateTime(allow_none=True,
                                 attribute="DbndScheduledJob.start_date",
                                 format="iso")
    end_date = fields.DateTime(allow_none=True,
                               attribute="DbndScheduledJob.end_date",
                               format="iso")
    readable_schedule_interval = fields.Str(
        attribute="DbndScheduledJob.readable_schedule_interval",
        allow_none=True)
    scheduled_interval_in_seconds = fields.Integer(
        attribute="DbndScheduledJob.scheduled_interval_in_seconds",
        allow_none=True)
    catchup = fields.Boolean(allow_none=True,
                             attribute="DbndScheduledJob.catchup")
    depends_on_past = fields.Boolean(
        allow_none=True, attribute="DbndScheduledJob.depends_on_past")
    retries = fields.Int(allow_none=True, attribute="DbndScheduledJob.retries")
    active = fields.Boolean(allow_none=True,
                            attribute="DbndScheduledJob.active")
    create_user = fields.Str(allow_none=True,
                             attribute="DbndScheduledJob.create_user")
    create_time = fields.DateTime(allow_none=True,
                                  attribute="DbndScheduledJob.create_time")
    update_user = fields.Str(allow_none=True,
                             attribute="DbndScheduledJob.update_user")
    update_time = fields.DateTime(allow_none=True,
                                  attribute="DbndScheduledJob.update_time")
    from_file = fields.Boolean(allow_none=True,
                               attribute="DbndScheduledJob.from_file")
    deleted_from_file = fields.Boolean(
        allow_none=True, attribute="DbndScheduledJob.deleted_from_file")
    next_job_date = fields.DateTime(attribute="DbndScheduledJob.next_job_date",
                                    allow_none=True)
    alerts = fields.List(
        fields.Nested(AlertEventSchema),
        attribute="DbndScheduledJob.alerts",
        allow_none=True,
    )

    # dump-only fields: populated by the server, never accepted on load
    job_name = fields.Str(dump_only=True, attribute="DbndScheduledJob.job_name")
    last_run_uid = fields.UUID(dump_only=True)
    last_run_job = fields.Str(dump_only=True)
    last_job_date = fields.DateTime(dump_only=True)
    last_run_state = fields.Str(dump_only=True)
    is_airflow_synced = fields.Bool(dump_only=True)
    list_order = fields.Integer(attribute="DbndScheduledJob.list_order",
                                allow_none=True)
    validation_errors = fields.Str(
        allow_none=True, attribute="DbndScheduledJob.validation_errors")
class MonitorConfigSchema(ApiStrictSchema):
    """Optional tuning knobs for a monitor's sync behavior (all fields optional)."""

    include_sources = fields.Boolean(required=False)
    dag_run_bulk_size = fields.Integer(required=False)
    start_time_window = fields.Integer(required=False)
    # log_bytes_from_head / log_bytes_from_end bound how much of a task log
    # is fetched from each end — presumably in bytes; confirm units with monitor code.
    log_bytes_from_head = fields.Integer(required=False)
    log_bytes_from_end = fields.Integer(required=False)
    use_async_tracking = fields.Boolean(required=False)
class AirflowServerInfoSchema(_ApiCallSchema):
    """Tracked Airflow server description plus monitor/sync status.

    Deserializes into an ``AirflowServerInfo`` object via ``@post_load``.
    NOTE(review): other classes named ``AirflowServerInfoSchema`` appear in
    this source — verify they belong to different modules, otherwise the
    later definition shadows this one.
    """

    base_url = fields.String()
    external_url = fields.String(allow_none=True)
    airflow_instance_uid = fields.String(allow_none=True)
    airflow_version = fields.String(allow_none=True)
    airflow_export_version = fields.String(allow_none=True)
    airflow_monitor_version = fields.String(allow_none=True)
    dags_path = fields.String(allow_none=True)
    logs_path = fields.String(allow_none=True)
    last_sync_time = fields.DateTime(allow_none=True)
    monitor_status = fields.String(allow_none=True)
    monitor_error_message = fields.String(allow_none=True)
    monitor_start_time = fields.DateTime(allow_none=True)
    synced_from = fields.DateTime(allow_none=True)
    synced_to = fields.DateTime(allow_none=True)
    api_mode = fields.String(allow_none=True)
    sync_interval = fields.Integer(allow_none=True)
    is_sync_enabled = fields.Boolean(allow_none=True)
    fetcher = fields.String(allow_none=True)
    composer_client_id = fields.String(allow_none=True)
    active_dags = fields.Dict(allow_none=True)
    name = fields.String(allow_none=True)
    env = fields.String(allow_none=True)
    include_logs = fields.Boolean(allow_none=True)
    include_task_args = fields.Boolean(allow_none=True)
    fetch_quantity = fields.Integer(allow_none=True)
    oldest_incomplete_data_in_days = fields.Integer(allow_none=True)

    @post_load
    def make_object(self, data, **kwargs):
        # Return a domain object instead of the raw dict produced by load().
        return AirflowServerInfo(**data)
class AirflowServerInfoSchema(ApiStrictSchema):
    """Airflow server / tracking-source info (strict-schema variant).

    Deserializes into an ``AirflowServerInfo`` object via ``@post_load``.
    NOTE(review): other classes named ``AirflowServerInfoSchema`` appear in
    this source — verify they belong to different modules.
    """

    airflow_version = fields.String(allow_none=True)
    airflow_export_version = fields.String(allow_none=True)
    airflow_monitor_version = fields.String(allow_none=True)
    last_sync_time = fields.DateTime(allow_none=True)
    monitor_error_message = fields.String(allow_none=True)
    synced_to = fields.DateTime(allow_none=True)
    api_mode = fields.String(allow_none=True)
    sync_interval = fields.Integer(allow_none=True)
    is_sync_enabled = fields.Boolean(allow_none=True)
    system_alert_definitions = fields.Dict()
    fetcher = fields.String(allow_none=True)
    composer_client_id = fields.String(allow_none=True)
    dag_ids = fields.String(allow_none=True)
    base_url = fields.String()
    external_url = fields.String(allow_none=True)
    source_instance_uid = fields.String(allow_none=True)
    tracking_source_uid = fields.String()
    airflow_instance_uid = fields.String(
        allow_none=True)  # TODO_API: deprecate
    name = fields.String(allow_none=True)
    env = fields.String(allow_none=True)
    monitor_status = fields.String(allow_none=True)
    monitor_config = fields.Nested(MonitorConfigSchema, allow_none=True)
    airflow_environment = fields.String(allow_none=True)
    last_seen_dag_run_id = fields.Integer(allow_none=True)
    last_seen_log_id = fields.Integer(allow_none=True)

    @post_load
    def make_object(self, data, **kwargs):
        # Return a domain object instead of the raw dict produced by load().
        return AirflowServerInfo(**data)
class RunInfoSchema(ApiStrictSchema):
    """Full run descriptor exchanged with the API; loads into a dotted-dict."""

    root_run_uid = fields.UUID()
    run_uid = fields.UUID()
    job_name = fields.String()
    project_name = fields.String(allow_none=True)
    user = fields.String()
    name = fields.String()
    description = fields.String(allow_none=True)
    state = EnumField(RunState)
    start_time = fields.DateTime()
    end_time = fields.DateTime(allow_none=True)

    # deprecate
    dag_id = fields.String()
    cmd_name = fields.String(allow_none=True)
    execution_date = fields.DateTime()

    # move to task
    target_date = fields.Date(allow_none=True)
    version = fields.String(allow_none=True)
    driver_name = fields.String()
    is_archived = fields.Boolean()
    env_name = fields.String(allow_none=True)
    cloud_type = fields.String()
    trigger = fields.String()
    task_executor = fields.String(allow_none=True)
    root_run = fields.Nested(RootRunInfoSchema)
    scheduled_run = fields.Nested(ScheduledRunInfoSchema, allow_none=True)
    # NOTE(review): in marshmallow 2 `default=` is the *serialization* default;
    # if a load-time default was intended, `missing=False` would be needed — confirm.
    sends_heartbeat = fields.Boolean(default=False, allow_none=True)
    scheduled_job_name = fields.String(allow_none=True)
    scheduled_date = fields.DateTime(allow_none=True)
    external_links = fields.Dict(allow_none=True)

    @post_load
    def make_run_info(self, data, **kwargs):
        # Expose loaded data with attribute access rather than a plain dict.
        return _as_dotted_dict(**data)
class MLSaveModelSchema(_ApiCallSchema): model = fields.Dict() # pickel job_name = fields.String() is_enabled = fields.Boolean(allow_none=True) data_set = fields.String(allow_none=True) # df? @post_load def make_object(self, data, **kwargs): return MLSaveModel(**data)
class JobFromFileSchema(Schema):
    """Schema for a scheduled job definition read from a file."""

    class Meta:
        # marshmallow 2.x-style option: raise on validation errors —
        # NOTE(review): Meta.strict was removed in marshmallow 3; confirm version.
        strict = True

    name = fields.Str(required=True)
    cmd = fields.Str(required=True)
    schedule_interval = fields.Str(required=True)
    start_date = fields.DateTime(allow_none=False, required=True, format="iso")
    owner = fields.Str(allow_none=False)
    end_date = fields.DateTime(allow_none=True, format="iso")
    depends_on_past = fields.Boolean(allow_none=True)
    catchup = fields.Boolean(allow_none=True)
    retries = fields.Int(allow_none=True)
    list_order = fields.Integer(allow_none=True)
    active = fields.Boolean(allow_none=True)
class JobSchemaV2(ApiObjectSchema):
    """Job record for the v2 API (ApiObjectSchema variant).

    NOTE(review): another ``JobSchemaV2`` (ApiStrictSchema-based) appears in
    this source — if both live in one module the later definition shadows
    the earlier; verify they come from different modules.
    """

    id = fields.Int()
    name = fields.Str()
    user = fields.Str()
    ui_hidden = fields.Boolean()
    is_airflow_synced = fields.Boolean()

    # computed
    run_states = fields.Dict()
    airflow_link = fields.Str()

    # joined (denormalized from the job's latest run)
    latest_run_start_time = fields.DateTime()
    latest_run_state = fields.Str()
    latest_run_uid = fields.UUID()
    latest_run_root_task_run_uid = fields.UUID()
    latest_run_trigger = fields.Str()
    latest_run_env = fields.Str()
    scheduled_job_count = fields.Number()
class AirflowServerInfoSchema(_ApiCallSchema):
    """Airflow server description and monitor status (older/narrower variant).

    Deserializes into an ``AirflowServerInfo`` object via ``@post_load``.
    NOTE(review): other classes named ``AirflowServerInfoSchema`` appear in
    this source — verify they belong to different modules.
    """

    base_url = fields.String()
    external_url = fields.String()
    airflow_version = fields.String(allow_none=True)
    airflow_export_version = fields.String(allow_none=True)
    airflow_monitor_version = fields.String(allow_none=True)
    dags_path = fields.String(allow_none=True)
    logs_path = fields.String(allow_none=True)
    last_sync_time = fields.DateTime(allow_none=True)
    monitor_status = fields.String(allow_none=True)
    monitor_error_message = fields.String(allow_none=True)
    monitor_start_time = fields.DateTime(allow_none=True)
    synced_from = fields.DateTime(allow_none=True)
    synced_to = fields.DateTime(allow_none=True)
    rbac_enabled = fields.Boolean(allow_none=True)
    sync_interval = fields.Integer(allow_none=True)
    is_sync_enabled = fields.Boolean(allow_none=True)

    @post_load
    def make_object(self, data, **kwargs):
        # Return a domain object instead of the raw dict produced by load().
        return AirflowServerInfo(**data)
class TaskDefinitionParamSchema(ApiObjectSchema):
    """Based on the TaskDefinitionParam object; loads into a dotted-dict."""

    name = fields.String()
    default = fields.String(allow_none=True)
    description = fields.String()
    group = EnumField(ParameterGroup)
    kind = EnumField(_ParameterKind)
    load_on_build = fields.Boolean()
    significant = fields.Boolean()
    value_type = fields.String()

    @post_load
    def make_task_definition_param(self, data, **kwargs):
        # Expose loaded data with attribute access rather than a plain dict.
        return _as_dotted_dict(**data)
class DagSchema(ApiStrictSchema):
    """Serialized Airflow DAG metadata, including its tasks and source info."""

    description = fields.String()
    root_task_ids = fields.List(fields.String())
    tasks = fields.Nested(TaskSchema, many=True)
    owner = fields.String()
    dag_id = fields.String()
    schedule_interval = fields.String()
    catchup = fields.Boolean(allow_none=True)
    start_date = fields.DateTime(allow_none=True)
    end_date = fields.DateTime(allow_none=True)
    is_committed = fields.Boolean()
    git_commit = fields.String()
    dag_folder = fields.String()
    hostname = fields.String()
    source_code = fields.String(allow_none=True)
    module_source_hash = fields.String(allow_none=True)
    is_subdag = fields.Boolean()
    tags = fields.List(fields.String(), allow_none=True)
    task_type = fields.String()
    task_args = fields.Dict()
    is_active = fields.Boolean(allow_none=True)
    is_paused = fields.Boolean(allow_none=True)
class ScheduledJobInfoSchema(ApiObjectSchema):
    """Scheduled-job descriptor; loads into a ``ScheduledJobInfo`` object."""

    uid = fields.UUID()
    name = fields.String()
    cmd = fields.String()
    start_date = fields.DateTime()
    create_user = fields.String()
    create_time = fields.DateTime()
    end_date = fields.DateTime(allow_none=True)
    schedule_interval = fields.String(allow_none=True)
    catchup = fields.Boolean(allow_none=True)
    depends_on_past = fields.Boolean(allow_none=True)
    retries = fields.Integer(allow_none=True)
    active = fields.Boolean(allow_none=True)
    update_user = fields.String(allow_none=True)
    update_time = fields.DateTime(allow_none=True)
    from_file = fields.Boolean(allow_none=True)
    deleted_from_file = fields.Boolean(allow_none=True)
    list_order = fields.Integer(allow_none=True)
    job_name = fields.String(allow_none=True)

    @post_load
    def make_object(self, data, **kwargs):
        # Return a domain object instead of the raw dict produced by load().
        return ScheduledJobInfo(**data)
class InitRunArgsSchema(ApiObjectSchema):
    """Arguments for initializing a run; loads into an ``InitRunArgs`` object."""

    run_uid = fields.UUID()
    root_run_uid = fields.UUID()
    driver_task_uid = fields.UUID(allow_none=True)
    task_run_env = fields.Nested(TaskRunEnvInfoSchema)
    task_runs_info = fields.Nested(TaskRunsInfoSchema)
    new_run_info = fields.Nested(RunInfoSchema, allow_none=True)
    scheduled_run_info = fields.Nested(ScheduledRunInfoSchema, allow_none=True)
    update_existing = fields.Boolean()
    source = fields.Str(allow_none=True)

    @post_load
    def make_init_run_args(self, data, **kwargs):
        # Return a domain object instead of the raw dict produced by load().
        return InitRunArgs(**data)
class TaskRunsInfoSchema(ApiObjectSchema):
    """Bundle of task runs for one run, with their relationships and artifacts.

    ``parent_child_map`` / ``upstreams_map`` are lists of UUID pairs encoding
    the task graph edges; loads into a dotted-dict.
    """

    run_uid = fields.UUID()
    root_run_uid = fields.UUID()
    task_run_env_uid = fields.UUID()
    parent_child_map = fields.List(fields.List(fields.UUID()))
    # task_signature_source is excluded from nested dumps — presumably to
    # keep the payload small; confirm with the API consumers.
    task_runs = fields.Nested(TaskRunInfoSchema, many=True,
                              exclude=("task_signature_source", ))
    upstreams_map = fields.List(fields.List(fields.UUID()))
    dynamic_task_run_update = fields.Boolean()
    targets = fields.Nested(TargetInfoSchema, many=True)
    task_definitions = fields.Nested(TaskDefinitionInfoSchema, many=True)
    af_context = fields.Nested(AirflowTaskContextSchema, allow_none=True)

    @post_load
    def make_run_info(self, data, **kwargs):
        # Expose loaded data with attribute access rather than a plain dict.
        return _as_dotted_dict(**data)
class TaskRunEnvInfoSchema(ApiObjectSchema):
    """Execution-environment snapshot for a task run (host, user, versions).

    Loads into a ``TaskRunEnvInfo`` object.
    """

    uid = fields.UUID()
    cmd_line = fields.String()
    user = fields.String()
    machine = fields.String()
    databand_version = fields.String()
    user_code_version = fields.String()
    user_code_committed = fields.Boolean()
    project_root = fields.String()
    user_data = fields.String()
    heartbeat = fields.DateTime()

    @post_load
    def make_object(self, data, **kwargs):
        # Return a domain object instead of the raw dict produced by load().
        return TaskRunEnvInfo(**data)
class TaskRunInfoSchema(ApiObjectSchema):
    """Single task-run descriptor: identity, flags, state, params, and links.

    Loads into a ``TaskRunInfo`` object.
    """

    task_run_uid = fields.UUID()
    task_run_attempt_uid = fields.UUID()
    task_definition_uid = fields.UUID()
    run_uid = fields.UUID()
    task_id = fields.String()
    task_signature = fields.String()
    task_signature_source = fields.String()
    task_af_id = fields.String()
    execution_date = fields.DateTime()
    name = fields.String()
    env = fields.String()
    command_line = fields.String()
    functional_call = fields.String()
    has_downstreams = fields.Boolean()
    has_upstreams = fields.Boolean()
    is_reused = fields.Boolean()
    is_dynamic = fields.Boolean()
    is_system = fields.Boolean()
    is_skipped = fields.Boolean()
    is_root = fields.Boolean()
    output_signature = fields.String()
    state = EnumField(TaskRunState)
    target_date = fields.Date(allow_none=True)
    # log_local / log_remote: locations of the captured task log, when available
    log_local = fields.String(allow_none=True)
    log_remote = fields.String(allow_none=True)
    version = fields.String()
    task_run_params = fields.Nested(TaskRunParamSchema, many=True)
    external_links = fields.Dict(allow_none=True)

    @post_load
    def make_task_run(self, data, **kwargs):
        # Return a domain object instead of the raw dict produced by load().
        return TaskRunInfo(**data)
class ScheduledJobArgsSchema(_ApiCallSchema):
    """Envelope for creating or updating a scheduled job."""

    scheduled_job_args = fields.Nested(ScheduledJobInfoSchema)
    # NOTE(review): in marshmallow 2 `default=` applies on dump, not load;
    # if callers rely on update_existing defaulting to False when absent
    # from the request, `missing=False` would be needed — confirm.
    update_existing = fields.Boolean(default=False)
class OkResponseSchema(ApiStrictSchema):
    """Minimal acknowledgement payload: a single boolean ``ok`` flag."""

    ok = fields.Boolean()
class JobsSetArchiveSchema(ApiStrictSchema):
    """Request body for bulk (un)archiving jobs; requires at least one job id."""

    ids = fields.List(fields.Integer(), required=True,
                      validate=validate.Length(min=1))
    is_archived = fields.Boolean(required=True)