def render_template(template: Any, context: Context, *, native: bool) -> Any:
    """Render a Jinja2 template with given Airflow context.

    The default implementation of ``jinja2.Template.render()`` converts the
    input context into a dict eagerly many times, which triggers deprecation
    messages in our custom context class. This takes the implementation apart
    and retains the context mapping without resolving it.

    :param template: A Jinja2 template to render.
    :param context: The Airflow task context to render the template with.
    :param native: If set to *True*, render the template into a native type. A
        DAG can enable this with ``render_template_as_native_obj=True``.
    :returns: The render result.
    """
    context = copy.copy(context)
    env = template.environment
    if template.globals:
        context.update((k, v) for k, v in template.globals.items() if k not in context)
    try:
        nodes = template.root_render_func(
            env.context_class(env, context, template.name, template.blocks)
        )
    except Exception:
        env.handle_exception()  # Rewrite traceback to point to the template.
    if native:
        return jinja2.nativetypes.native_concat(nodes)
    return "".join(nodes)

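# --- Illustrative usage sketch (not from the Airflow sources) ---
# A minimal exercise of render_template() above, assuming the module has
# ``copy``, ``jinja2`` and ``jinja2.nativetypes`` imported and that a plain
# dict can stand in for the Airflow Context (Context is a str-keyed mapping).
import jinja2
import jinja2.nativetypes


def _demo_render_template() -> None:
    # Native rendering keeps Python types: "{{ a + b }}" yields the int 3.
    native_env = jinja2.nativetypes.NativeEnvironment()
    assert render_template(native_env.from_string("{{ a + b }}"), {"a": 1, "b": 2}, native=True) == 3

    # Non-native rendering string-joins the emitted nodes instead.
    env = jinja2.Environment()
    assert render_template(env.from_string("{{ a + b }}"), {"a": 1, "b": 2}, native=False) == "3"
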
def poke(self, context: Context) -> bool:
    context.update(self.op_kwargs)
    context['templates_dict'] = self.templates_dict
    self.op_kwargs = determine_kwargs(self.python_callable, self.op_args, context)

    self.log.info("Poking callable: %s", str(self.python_callable))
    return_value = self.python_callable(*self.op_args, **self.op_kwargs)
    return bool(return_value)

def execute(self, context: Context) -> Any:
    context.update(self.op_kwargs)
    context['templates_dict'] = self.templates_dict
    self.op_kwargs = self.determine_kwargs(context)

    return_value = self.execute_callable()
    if self.show_return_value_in_logs:
        self.log.info("Done. Returned value was: %s", return_value)
    else:
        self.log.info("Done. Returned value not shown")
    return return_value

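# --- Illustrative sketch (not from the Airflow sources) ---
# Both snippets above funnel the merged context through determine_kwargs() so
# the python_callable only receives keyword arguments it actually declares.
# Below is a simplified stand-in built on inspect.signature; the real helper
# also accounts for positional op_args already consumed by the call.
import inspect
from typing import Any, Callable, Mapping


def _filter_kwargs_for(callable_: Callable, context: Mapping[str, Any]) -> dict:
    sig = inspect.signature(callable_)
    if any(p.kind == inspect.Parameter.VAR_KEYWORD for p in sig.parameters.values()):
        return dict(context)  # **kwargs present: pass everything through.
    return {k: v for k, v in context.items() if k in sig.parameters}


def _example_callable(ds, ti=None):
    return ds


assert _filter_kwargs_for(_example_callable, {"ds": "2022-01-01", "run_id": "x"}) == {"ds": "2022-01-01"}
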
def _render_filename(self, ti: "TaskInstance", try_number: int) -> str:
    if self.filename_jinja_template:
        if hasattr(ti, "task"):
            context = ti.get_template_context()
        else:
            context = Context(ti=ti, ts=ti.get_dagrun().logical_date.isoformat())
        context["try_number"] = try_number
        return render_template_to_string(self.filename_jinja_template, context)

    return self.filename_template.format(
        dag_id=ti.dag_id,
        task_id=ti.task_id,
        execution_date=ti.get_dagrun().logical_date.isoformat(),
        try_number=try_number,
    )

def _render_filename(self, ti: "TaskInstance", try_number: int) -> str:
    with create_session() as session:
        dag_run = ti.get_dagrun(session=session)
        template = dag_run.get_log_template(session=session).filename
    str_tpl, jinja_tpl = parse_template_string(template)

    if jinja_tpl:
        if hasattr(ti, "task"):
            context = ti.get_template_context()
        else:
            context = Context(ti=ti, ts=dag_run.logical_date.isoformat())
        context["try_number"] = try_number
        return render_template_to_string(jinja_tpl, context)
    elif str_tpl:
        dag = ti.task.dag
        assert dag is not None  # For Mypy.
        try:
            data_interval: Tuple[datetime, datetime] = dag.get_run_data_interval(dag_run)
        except AttributeError:  # ti.task is not always set.
            data_interval = (dag_run.data_interval_start, dag_run.data_interval_end)
        if data_interval[0]:
            data_interval_start = data_interval[0].isoformat()
        else:
            data_interval_start = ""
        if data_interval[1]:
            data_interval_end = data_interval[1].isoformat()
        else:
            data_interval_end = ""
        return str_tpl.format(
            dag_id=ti.dag_id,
            task_id=ti.task_id,
            run_id=ti.run_id,
            data_interval_start=data_interval_start,
            data_interval_end=data_interval_end,
            execution_date=ti.get_dagrun().logical_date.isoformat(),
            try_number=try_number,
        )
    else:
        raise RuntimeError(f"Unable to render log filename for {ti}. This should never happen")

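# --- Illustrative sketch (not from the Airflow sources) ---
# The non-Jinja branch above is a plain str.format() over a fixed set of named
# fields. A hypothetical filename template with assumed example values:
_str_tpl = "{dag_id}/{task_id}/{run_id}/attempt={try_number}.log"  # assumed template
print(
    _str_tpl.format(
        dag_id="example_dag",
        task_id="extract",
        run_id="scheduled__2022-01-01T00:00:00+00:00",
        try_number=1,
    )
)
# -> example_dag/extract/scheduled__2022-01-01T00:00:00+00:00/attempt=1.log
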
def execute(self, context: Context):
    serializable_keys = set(self._iter_serializable_context_keys())
    serializable_context = context.copy_only(serializable_keys)
    return super().execute(context=serializable_context)

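# --- Illustrative sketch (not from the Airflow sources) ---
# copy_only() narrows the context to a whitelist of keys before the smart-sensor
# path serializes it. A minimal dict-based equivalent (Airflow's Context class
# may implement this differently):
from typing import Any, Container, Mapping


def _copy_only(context: Mapping[str, Any], keys: Container[str]) -> dict:
    return {k: v for k, v in context.items() if k in keys}


assert _copy_only({"ds": "2022-01-01", "ti": object()}, {"ds"}) == {"ds": "2022-01-01"}
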
            self._load_sensor_works()
            self.log.info("Loaded %s sensor_works", len(self.sensor_works))
            Stats.gauge("smart_sensor_operator.loaded_tasks", len(self.sensor_works))

            for sensor_work in self.sensor_works:
                self._execute_sensor_work(sensor_work)

            duration = (timezone.utcnow() - poke_start_time).total_seconds()
            self.log.info("Taking %s to execute %s tasks.", duration, len(self.sensor_works))

            Stats.timing("smart_sensor_operator.loop_duration", duration)
            Stats.gauge("smart_sensor_operator.executed_tasks", len(self.sensor_works))
            self._emit_loop_stats()

            if duration < self.poke_interval:
                sleep(self.poke_interval - duration)
            if (timezone.utcnow() - started_at).total_seconds() > self.timeout:
                self.log.info("Time is out for smart sensor.")
                return

    def on_kill(self):
        pass


if __name__ == '__main__':
    SmartSensorOperator(task_id='test').execute(Context())

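# --- Illustrative sketch (not from the Airflow sources) ---
# The tail of the poke loop above paces each iteration to poke_interval and
# enforces an overall timeout. The same arithmetic in isolation, using
# time.monotonic() and assumed interval/timeout values:
import time


def _paced_loop(poke_interval: float = 1.0, timeout: float = 3.0) -> None:
    started_at = time.monotonic()
    while True:
        loop_start = time.monotonic()
        # ... one round of sensor work would run here ...
        duration = time.monotonic() - loop_start
        if duration < poke_interval:
            time.sleep(poke_interval - duration)  # sleep away the remainder of the interval
        if time.monotonic() - started_at > timeout:
            return  # overall budget exhausted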