def test_layers(self):
    # Verifies that config layers stack and unwind correctly:
    # an inner layer overrides values set via config.set() on the outer layer.
    with config({"b": dict(a=2)}):
        config.log_current_config()
        # Mutate the "core" section inside the outer layer.
        config.set("core", "a", "1")
        config.set("core", "b", "1")
        # Inner layer overrides core.a; the layered value (5) must win
        # over the earlier set("core", "a", "1").
        with config({"core": dict(a=5)}):
            config.log_current_config(as_table=True)
            assert config.get("core", "a") == 5
        # NOTE(review): trailing log calls reconstructed after the inner
        # layer exits — original indentation was lost; confirm against VCS.
        config.log_current_config()
        config.log_layers()
def new_dbnd_context(conf=None, name=None, **kwargs):
    # type: (...) -> ContextManager[DatabandContext]
    """Yield a fresh DatabandContext with `conf` applied as a config layer.

    The config layer (tagged with source "inplace") is pushed before the
    context is created and popped after it exits, so configuration changes
    do not leak past this scope.  `kwargs` are forwarded to
    DatabandContext.new_context; allow_override=True permits replacing an
    already-active context.
    """
    # presumably decorated with @contextmanager at the definition site —
    # the decorator is outside this view; the yield implies it.
    with config(config_values=conf, source="inplace"):
        with DatabandContext.new_context(
            name=name, allow_override=True, **kwargs
        ) as dc:
            yield dc
def test_pformat_current_config(self):
    """The pretty-printed config must surface both the key and its value."""
    key = "test_log_current_config_abc"
    value = "test_log_current_config_value"
    with config(_tc(key, value), source="test_source"):
        rendered = pformat_current_config(config)
    # The rendered string is plain text; assertions can run after the layer pops.
    for expected in (key, value):
        assert expected in rendered
def dbnd_tracking(name=None, conf=None):
    # type: (...) -> TaskRun
    """Yield a tracking TaskRun; guarantee dbnd_run_stop() on exit.

    Pushes `conf` as a config layer (source "tracking context"), starts a
    tracked run, and yields it.  The finally clause ensures the run is
    stopped even if the caller's body raises or dbnd_run_start fails.
    """
    # presumably wrapped with @contextmanager at the definition site —
    # not visible here, but the yield implies it.
    try:
        with config(config_values=conf, source="tracking context"):
            tr = dbnd_run_start(name=name)
            yield tr
    finally:
        # Always stop the run, even on error inside the managed block.
        dbnd_run_stop()
def test_str_interpolation(self):
    """An "@python://<dotted.path>" value resolves to the referenced object."""
    pointer = "test_dbnd.configuration.test_config_layers._a"
    layer = {"b": {"a": "@python://%s" % pointer}}
    with config(layer):
        assert config.get("b", "a") == "from_a"
def test_pformat_table_current_config(self):
    """Table-formatted config output shows key, value, and the layer source."""
    key = "test_log_current_config_abc"
    value = "test_log_current_config_value"
    with config(_tc(key, value), source="test_source"):
        rendered = pformat_current_config(
            config, as_table=True, sections=["test_section"]
        )
    # Plain-string assertions; safe to run after the config layer pops.
    assert key in rendered
    assert value in rendered
    assert "test_source" in rendered
def _create_task(cls, args, kwargs): task_definition = cls.task_definition # we need to have context initialized before we start to run all logic in config() scope with config( config_values=task_definition.task_defaults_config_store, source=task_definition.task_passport.format_source_name("defaults"), merge_settings=ConfigMergeSettings.on_non_exists_only, ) as task_config: # update config with current class defaults # we apply them to config only if there are no values (this is defaults) return create_dbnd_task( config=task_config, new_task_factory=cls._build_task_obj, task_cls=cls, task_args=args, task_kwargs=kwargs, )
def test_command_running():
    """Run the airflow_monitor CLI once against a local sqlite fixture DB.

    Patches the persistence hooks (save_airflow_monitor_data /
    save_airflow_server_info) and the server-fetching configuration so the
    monitor performs exactly one iteration instead of looping forever in CI.
    """
    # Fix: the original wrapped os.path.join(...) in a second, single-argument
    # os.path.join(...) — a one-argument join returns its argument unchanged,
    # so the outer call was a no-op and is dropped here.
    db_file = os.path.normpath(
        os.path.join(os.path.dirname(__file__), "fetch-unittests.db")
    )
    db_path = "sqlite:///" + os.path.abspath(db_file)

    airflow_init_db(db_path)
    airflow_config = AirflowMonitorConfig()
    airflow_config.fetcher = "db"
    airflow_config.sql_alchemy_conn = db_path

    fetching_configuration = AirflowFetchingConfiguration(
        url="http://localhost:8082",
        fetcher="db",
        composer_client_id=None,
        sql_alchemy_conn=airflow_config.sql_alchemy_conn,
        local_dag_folder=airflow_config.local_dag_folder,
        api_mode="rbac",
    )

    # We need these mocks, because otherwise we are going to enter an
    # infinite loop in CI/CD.
    with mock.patch(
        "airflow_monitor.airflow_monitor_main.save_airflow_monitor_data"
    ), mock.patch(
        "airflow_monitor.airflow_monitor_main.save_airflow_server_info"
    ), mock.patch(
        "airflow_monitor.airflow_servers_fetching.AirflowServersGetter.get_fetching_configuration",
        return_value=[fetching_configuration],
    ):
        runner = CliRunner()
        with config({"core": {"tracker": "console"}}):
            result = runner.invoke(
                airflow_monitor,
                [
                    "--since",
                    "01/09/2020 10:00:00",
                    "--number-of-iterations",
                    1,
                ],
            )
            assert result.exit_code == 0
def _create_task(cls, args, kwargs): task_definition = cls.task_definition # we need to have context initialized before we start to run all logic in config() scope # update config with current class defaults # we apply them to config only if there are no values (this is defaults) with config( config_values=task_definition.task_defaults_config_store, source=task_definition.task_passport.format_source_name( "defaults"), merge_settings=ConfigMergeSettings.on_non_exists_only, ) as task_config: tracking_mode = TaskEssence.TRACKING.is_included(cls) # create task meta first task_meta_factory = (TrackedTaskMetaFactory if tracking_mode else TaskMetaFactory) factory = task_meta_factory(config=task_config, task_cls=cls, task_args=args, task_kwargs=kwargs) task_meta = factory.create_dbnd_task_meta() # If a Task has already been instantiated with the same parameters, # the previous instance is returned to reduce number of object instances. tic = get_databand_context().task_instance_cache task = tic.get_task_obj_by_id(task_meta.obj_key.id) if not task or tracking_mode or hasattr(task, "_dbnd_no_cache"): task = cls._build_task_obj(task_meta) tic.register_task_obj_instance(task) # now the task is created - all nested constructors will see it as parent with task_context(task, TaskContextPhase.BUILD): task._initialize() task._validate() task.task_meta.config_layer = config.config_layer tic.register_task_instance(task) parent_task = try_get_current_task() if (parent_task and hasattr(task, "task_id") and (task.task_essence != TaskEssence.CONFIG)): parent_task.descendants.add_child(task.task_id) return task
def __call__(cls, *args, **kwargs): """ Custom class instantiation utilizing instance cache. """ # use-case of TaskClass() call from airflow context during DAG creation _dbnd_disable_airflow_inplace = kwargs.pop( "_dbnd_disable_airflow_inplace", False) if (is_in_airflow_dag_build_context() and TaskEssence.is_task_cls(cls) and not _dbnd_disable_airflow_inplace and not getattr(cls, "_dbnd_decorated_task", False)): kwargs = kwargs.copy() kwargs["_dbnd_disable_airflow_inplace"] = True return build_task_at_airflow_dag_context(task_cls=cls, call_args=args, call_kwargs=kwargs) task_definition = cls.task_definition # we need to have context initialized before we start to run all logic in config() scope # create new config layer, so when we are out of this process -> config is back to the previous value with config( config_values={}, source=task_definition.task_passport.format_source_name( "ctor"), ) as task_config: factory = TaskFactory( config=task_config, task_cls=cls, task_definition=cls.task_definition, task_args=args, task_kwargs=kwargs, ) task_object = factory.build_task_object(cls) parent_task = try_get_current_task() if (parent_task and hasattr(task_object, "task_id") and (task_object.task_essence != TaskEssence.CONFIG)): parent_task.descendants.add_child(task_object.task_id) return task_object
def test_log_current_config(self):
    # Smoke test: logging the current config with an extra test layer
    # applied should complete without raising.
    with config(
        _tc("test_log_current_config_abc", "test_log_current_config_value"),
        source="test_source",
    ):
        config.log_current_config()