def run_dbt_task(self, config: BaseConfig) -> tuple[bool, Optional[RunResult]]:
    """Run a dbt task with a given configuration and return the results.

    The configuration used determines the task that will be ran.

    Returns:
        A tuple containing a boolean indicating success and optionally the results
        of running the dbt command.
    """
    # Resolve an extra target from the connection and let the task type run
    # any configuration-specific setup before the task object is built.
    extra_target = self.get_target_from_connection(config.target)
    config.dbt_task.pre_init_hook(config)
    task, runtime_config = config.create_dbt_task(extra_target)
    self.ensure_profiles(config.profiles_dir)

    # When creating tasks via from_args, dbt switches to the project directory.
    # We have to do that here as we are not using from_args.
    move_to_nearest_project_dir(config)

    if not isinstance(runtime_config, (UnsetProfileConfig, type(None))):
        # The deps command installs the dependencies, which means they may not exist
        # before deps runs and the following would raise a CompilationError.
        runtime_config.load_dependencies()

    results = None
    # adapter_management() scopes adapter setup/teardown around the run.
    with adapter_management():
        # Tasks without a real profile (e.g. deps) have no runtime_config to
        # register an adapter for.
        if not isinstance(runtime_config, (UnsetProfileConfig, type(None))):
            register_adapter(runtime_config)

        # track_run wraps execution for dbt's usage tracking.
        with track_run(task):
            results = task.run()

        success = task.interpret_results(results)
    return success, results
def dbt_config(self):
    """Loads the dbt config.

    Builds a dbt ``RuntimeConfig`` from this templater's project/profile
    settings, registers the matching adapter, and returns the config.

    NOTE(review): this assigns ``self.dbt_config``, shadowing the method
    itself on the instance — presumably this method is wrapped by a
    caching property decorator; confirm at the definition site.
    """
    if self.dbt_version_tuple >= (1, 0):
        # Here, we read flags.PROFILES_DIR directly, prior to calling
        # set_from_args(). Apparently, set_from_args() sets PROFILES_DIR
        # to a lowercase version of the value, and the profile wouldn't be
        # found if the directory name contained uppercase letters. This fix
        # was suggested and described here:
        # https://github.com/sqlfluff/sqlfluff/issues/2253#issuecomment-1018722979
        user_config = read_user_config(flags.PROFILES_DIR)
        flags.set_from_args(
            DbtConfigArgs(
                project_dir=self.project_dir,
                profiles_dir=self.profiles_dir,
                profile=self._get_profile(),
                vars=self._get_cli_vars(),
            ),
            user_config,
        )
    # Note: unlike the flags call above, this also passes an explicit target.
    self.dbt_config = DbtRuntimeConfig.from_args(
        DbtConfigArgs(
            project_dir=self.project_dir,
            profiles_dir=self.profiles_dir,
            profile=self._get_profile(),
            target=self._get_target(),
            vars=self._get_cli_vars(),
        )
    )
    register_adapter(self.dbt_config)
    return self.dbt_config
def dbt_config(self):
    """Loads the dbt config.

    Builds a dbt ``RuntimeConfig`` from this instance's project, profiles
    directory and profile, registers the matching adapter, caches the
    config on ``self.dbt_config``, and returns it.
    """
    # Imported lazily so dbt is only required when templating actually runs.
    from dbt.adapters.factory import register_adapter
    from dbt.config.runtime import RuntimeConfig as DbtRuntimeConfig

    runtime_config = DbtRuntimeConfig.from_args(
        DbtConfigArgs(
            project_dir=self._get_project_dir(),
            profiles_dir=self._get_profiles_dir(),
            profile=self._get_profile(),
        )
    )
    # Cache on the instance (shadowing this method) and make sure the
    # adapter plugin for this config is registered before use.
    self.dbt_config = runtime_config
    register_adapter(runtime_config)
    return runtime_config
def attempt_connection(profile):
    """Return a string containing the error message, or None if there
    was no error.
    """
    register_adapter(profile)
    adapter = get_adapter(profile)

    # Run the adapter's canary query on a throwaway named connection;
    # any failure is captured and reported rather than raised.
    failure_message = None
    try:
        with adapter.connection_named('debug'):
            adapter.debug_query()
    except Exception as exc:
        failure_message = COULD_NOT_CONNECT_MESSAGE.format(
            err=str(exc),
            url=ProfileConfigDocs,
        )
    return failure_message
def _spawn_setup(self): """ Because we're using spawn, we have to do a some things that dbt does dynamically at process load. These things are inherited automatically in fork mode, where fork() keeps everything in memory. """ # reset flags dbt.flags.set_from_args(self.task.args) # reload the active plugin load_plugin(self.task.config.credentials.type) # register it register_adapter(self.task.config) # reset tracking, etc self.task.config.config.set_values(self.task.args.profiles_dir)
def setUp(self):
    """Prepare a fresh adapter for model tests from env vars + profiles."""
    # Target database coordinates come from the environment.
    self.database = os.getenv('DBT_MODEL_TEST_DATABASE')
    self.schema = os.getenv('DBT_MODEL_TEST_SCHEMA')
    self.identifier_prefix = os.getenv('DBT_MODEL_TEST_IDENTIFIER_PREFIX')

    # Drop any adapters left over from a previous test before building ours.
    reset_adapters()

    test_args = TestArgs({
        'profile': 'modeltests',
        'profiles_dir': 'conf/',
        'target': None,
    })
    runtime_config = RuntimeConfig.from_args(test_args)
    register_adapter(runtime_config)
    adapter = get_adapter(runtime_config)
    adapter.cleanup_connections()
    self.adapter = adapter
def built_schema(project_dir, schema, profiles_dir, test_kwargs, project_def):
    """Generator fixture: create `schema`, yield, then drop it again."""
    # make our args, write our project out
    args = TestArgs(profiles_dir=profiles_dir, kwargs=test_kwargs)
    project_def.write_to(project_dir)
    # build a config of our own
    os.chdir(project_dir)
    start = os.getcwd()
    # NOTE(review): `start` is captured *after* chdir(project_dir), so the
    # `finally` below restores project_dir rather than the caller's original
    # cwd. That only matters if from_args() itself changes directory —
    # confirm whether the intent was to snapshot the cwd *before* the chdir.
    try:
        cfg = RuntimeConfig.from_args(args)
    finally:
        os.chdir(start)
    register_adapter(cfg)
    adapter = get_adapter(cfg)
    execute(adapter, 'drop schema if exists {} cascade'.format(schema))
    execute(adapter, 'create schema {}'.format(schema))
    yield
    # Re-fetch the adapter after the test body ran, then tear the schema down.
    adapter = get_adapter(cfg)
    adapter.cleanup_connections()
    execute(adapter, 'drop schema if exists {} cascade'.format(schema))
def tearDown(self):
    """Clean up adapters and schemas, then remove the test directory."""
    # get any current run adapter and clean up its connections before we
    # reset them. It'll probably be different from ours because
    # handle_and_check() calls reset_adapters().
    register_adapter(self.config)
    adapter = get_adapter(self.config)
    # BUGFIX: `self.adapter` may be unset if setUp() failed before assigning
    # it; the previous `adapter is not self.adapter` comparison would then
    # raise AttributeError before the hasattr() guard below could ever run.
    # Using getattr with a None default keeps the guard reachable.
    if adapter is not getattr(self, 'adapter', None):
        adapter.cleanup_connections()
    if not hasattr(self, 'adapter'):
        self.adapter = adapter
    self._drop_schemas()
    self.adapter.cleanup_connections()
    reset_adapters()
    os.chdir(INITIAL_ROOT)
    try:
        shutil.rmtree(self.test_root_dir)
    except EnvironmentError:
        # Best-effort cleanup: log and continue rather than fail the test
        # run over an unremovable temp directory (e.g. open file handles).
        logger.exception('Could not clean up after test - {} not removable'
                         .format(self.test_root_dir))
def load_config(self):
    """Instantiate a fresh adapter and config for the tests.

    The profile and project have already been written out; a different
    connection handle is used here so we don't look into an incomplete
    transaction.
    """
    cli_kwargs = {
        'profile': None,
        'profiles_dir': self.test_root_dir,
        'target': None,
    }
    cfg = RuntimeConfig.from_args(TestArgs(cli_kwargs))
    register_adapter(cfg)

    fresh_adapter = get_adapter(cfg)
    fresh_adapter.cleanup_connections()

    self.adapter_type = fresh_adapter.type()
    self.adapter = fresh_adapter
    self.config = cfg

    # Start from a clean slate of schemas for this test.
    self._drop_schemas()
    self._create_schemas()
def __init__(self, args, config):
    """Initialize the task and register the adapter for its config."""
    super().__init__(args, config)
    # Make sure the adapter plugin for this config is registered before
    # the task runs and asks for connections.
    register_adapter(self.config)
def reload_config(self):
    """Re-parse the runtime config from args and swap the registered adapter.

    Returns the freshly parsed config after storing it on ``self.config``.
    """
    fresh_config = self.config.from_args(self.args)
    self.config = fresh_config
    # Tear down any adapters bound to the old config before registering
    # one for the new config.
    reset_adapters()
    register_adapter(fresh_config)
    return fresh_config