def adapter_macro(self, name: str, *args, **kwargs):
    """Find the most appropriate macro for the name, considering the
    adapter type currently in use, and call that with the given arguments.

    If the name has a `.` in it, the first section before the `.` is
    interpreted as a package name, and the remainder as a macro name.

    If no adapter is found, raise a compiler exception. If an invalid
    package name is specified, raise a compiler exception.

    Some examples:

        {# dbt will call this macro by name, providing any arguments #}
        {% macro create_table_as(temporary, relation, sql) -%}
          {# dbt will dispatch the macro call to the relevant macro #}
          {{ adapter_macro('create_table_as', temporary, relation, sql) }}
        {%- endmacro %}

        {# If no macro matches the specified adapter, "default" will be used #}
        {% macro default__create_table_as(temporary, relation, sql) -%}
            ...
        {%- endmacro %}

        {# Example which defines special logic for Redshift #}
        {% macro redshift__create_table_as(temporary, relation, sql) -%}
            ...
        {%- endmacro %}

        {# Example which defines special logic for BigQuery #}
        {% macro bigquery__create_table_as(temporary, relation, sql) -%}
            ...
        {%- endmacro %}
    """
    # adapter_macro itself is deprecated in favor of adapter.dispatch.
    deprecations.warn('adapter-macro', macro_name=name)
    # Keep the caller-supplied dotted name for use in error messages.
    original_name = name
    package_names: Optional[List[str]] = None
    if '.' in name:
        # "package.macro" form: restrict dispatch to that one package.
        package_name, name = name.split('.', 1)
        package_names = [package_name]
    try:
        macro = self.db_wrapper.dispatch(macro_name=name, packages=package_names)
    except CompilationException as exc:
        # Re-raise with the original dotted name so the user can see
        # which adapter_macro() call failed, chaining the cause.
        raise CompilationException(
            f'In adapter_macro: {exc.msg}\n'
            f" Original name: '{original_name}'",
            node=self.model) from exc
    return macro(*args, **kwargs)
def execute_macro(
    self,
    macro_name: str,
    manifest: Optional[Manifest] = None,
    project: Optional[str] = None,
    context_override: Optional[Dict[str, Any]] = None,
    kwargs: Optional[Dict[str, Any]] = None,
    release: bool = False,
    text_only_columns: Optional[Iterable[str]] = None,
) -> agate.Table:
    """Look macro_name up in the manifest and execute its results.

    :param macro_name: The name of the macro to execute.
    :param manifest: The manifest to use for generating the base macro
        execution context. If none is provided, use the internal manifest.
    :param project: The name of the project to search in, or None for the
        first match.
    :param context_override: An optional dict to update() the macro
        execution context.
    :param kwargs: An optional dict of keyword args used to pass to the
        macro.
    :param release: Ignored.
    :param text_only_columns: NOTE(review): accepted but never used in
        this body -- confirm whether a caller or override relies on it.
    :raises RuntimeException: if no macro with the given name is found.
    """
    if release is not False:
        # Passing release explicitly is deprecated; it has no effect.
        deprecations.warn('execute-macro-release')
    if kwargs is None:
        kwargs = {}
    if context_override is None:
        context_override = {}
    if manifest is None:
        # Fall back to the internal (builtin) macro manifest.
        manifest = self._macro_manifest
    macro = manifest.find_macro_by_name(macro_name, self.config.project_name, project)
    if macro is None:
        if project is None:
            package_name = 'any package'
        else:
            package_name = 'the "{}" package'.format(project)
        raise RuntimeException(
            'dbt could not find a macro with the name "{}" in {}'.format(
                macro_name, package_name))
    # This causes a reference cycle, as generate_runtime_macro()
    # ends up calling get_adapter, so the import has to be here.
    from dbt.context.providers import generate_runtime_macro
    macro_context = generate_runtime_macro(macro=macro, config=self.config, manifest=manifest, package_name=project)
    macro_context.update(context_override)
    macro_function = MacroGenerator(macro, macro_context)
    # Run inside the adapter's exception handler so database errors are
    # attributed to this macro by name.
    with self.connections.exception_handler(f'macro {macro_name}'):
        result = macro_function(**kwargs)
    return result
def _materialization_relations(self, result: Any, model) -> List[BaseRelation]:
    """Normalize a materialization's return value into a list of relations.

    A bare string return is deprecated (the model's own relation is
    substituted); a dict return is validated for a "relations" key.
    Anything else is a compilation error.
    """
    if isinstance(result, str):
        deprecations.warn(
            'materialization-return',
            materialization=model.get_materialization(),
        )
        fallback = self.adapter.Relation.create_from(self.config, model)
        return [fallback]

    if isinstance(result, dict):
        return _validate_materialization_relations_dict(result, model)

    error_message = ('Invalid return value from materialization, expected a dict '
                     'with key "relations", got: {}'.format(str(result)))
    raise CompilationException(error_message, node=model)
def quote_seed_column(self, column: str, quote_config: Optional[bool]) -> str:
    """Return ``column``, quoted if the seed's quote_columns config asks for it.

    An unset config is deprecated (and treated as False); a non-bool,
    non-None value is a compiler error.
    """
    # Quoting defaults to off until the project opts in.
    should_quote: bool = False
    if isinstance(quote_config, bool):
        should_quote = quote_config
    elif quote_config is None:
        deprecations.warn('column-quoting-unset')
    else:
        raise_compiler_error(
            f'The seed configuration value of "quote_columns" has an '
            f'invalid type {type(quote_config)}')
    return self.quote(column) if should_quote else column
def update_parsed_node_schema(
    self, parsed_node: IntermediateNode, config_dict: Dict[str, Any]
) -> None:
    """Resolve the node's schema via the generate_schema_name macro and
    set it (stripped) on ``parsed_node``.
    """
    # Special macro defined in the global project. Use the root project's
    # definition, not the current package
    schema_override = config_dict.get('schema')
    get_schema = self.get_schema_func()
    try:
        schema = get_schema(schema_override, parsed_node)
    except dbt.exceptions.CompilationException as exc:
        # Legacy generate_schema_name macros take only one argument.
        # Detect that case by matching the jinja arity error message,
        # warn about the deprecation, and retry with the one-arg call.
        too_many_args = (
            "macro 'dbt_macro__generate_schema_name' takes not more than "
            "1 argument(s)"
        )
        if too_many_args not in str(exc):
            # Some other compilation failure: propagate unchanged.
            raise
        deprecations.warn('generate-schema-name-single-arg')
        schema = get_schema(schema_override)  # type: ignore
    parsed_node.schema = schema.strip()
def _update_parsed_node_info(self, parsed_node, config): """Given the SourceConfig used for parsing and the parsed node, generate and set the true values to use, overriding the temporary parse values set in _build_intermediate_parsed_node. """ # Set tags on node provided in config blocks model_tags = config.config.get('tags', []) parsed_node.tags.extend(model_tags) # Overwrite node config config_dict = parsed_node.get('config', {}) config_dict.update(config.config) parsed_node.config = config_dict # Special macro defined in the global project. Use the root project's # definition, not the current package schema_override = config.config.get('schema') get_schema = self.get_schema_func() try: schema = get_schema(schema_override, parsed_node) except dbt.exceptions.CompilationException as exc: too_many_args = ( "macro 'dbt_macro__generate_schema_name' takes not more than " "1 argument(s)" ) if too_many_args not in str(exc): raise deprecations.warn('generate-schema-name-single-arg') schema = get_schema(schema_override) parsed_node.schema = schema.strip() alias_override = config.config.get('alias') get_alias = self.get_alias_func() parsed_node.alias = get_alias(alias_override, parsed_node).strip() parsed_node.database = config.config.get( 'database', self.default_database ).strip() for hook_type in dbt.hooks.ModelHookType.Both: parsed_node.config[hook_type] = dbt.hooks.get_hooks(parsed_node, hook_type)
def patch_nodes(
    self, patches: MutableMapping[str, ParsedNodePatch]
) -> None:
    """Patch nodes with the given dict of patches. Note that this consumes
    the input!
    This relies on the fact that all nodes have unique _name_ fields, not
    just unique unique_id fields.
    """
    # There is no mapping from node _names_ to nodes and the patch only
    # carries the name, so scan every node and pop any matching patch.
    # (A NameSearcher would be an O(n*m) search -- one scan per patch.)
    for node in self.nodes.values():
        patch = patches.pop(node.name, None)
        if not patch:
            continue
        expected_key = node.resource_type.pluralize()
        if patch.yaml_key != expected_key:
            # A patch under the wrong yaml key: 'models' is only a
            # deprecation warning, anything else is an error.
            if patch.yaml_key == 'models':
                deprecations.warn(
                    'models-key-mismatch',
                    patch=patch, node=node, expected_key=expected_key
                )
            else:
                raise_invalid_patch(
                    node, patch.yaml_key, patch.original_file_path
                )
        node.patch(patch)

    # Whatever remains matched no node; since patches aren't nodes we
    # can't use the existing target_not_found warning.
    for leftover in patches.values():
        logger.debug((
            'WARNING: Found documentation for resource "{}" which was '
            'not found or is disabled').format(leftover.name)
        )
def load_all(
    cls,
    root_config: RuntimeConfig,
    internal_manifest: Optional[Manifest],
    macro_hook: Callable[[Manifest], Any],
) -> Manifest:
    """Load every project, parse it, and return the validated Manifest."""
    with PARSING_STATE:
        projects = root_config.load_dependencies()

        # Collect the names of any projects still on dbt_project.yml v1
        # and emit a single combined deprecation warning.
        legacy_project_names = [
            f'\n\n - {project.project_name}'
            for project in projects.values()
            if project.config_version == 1
        ]
        if legacy_project_names:
            deprecations.warn(
                'dbt-project-yaml-v1',
                project_names=''.join(legacy_project_names))

        loader = cls(root_config, projects, macro_hook)
        loader.load(internal_manifest=internal_manifest)
        loader.write_parse_results()
        manifest = loader.create_manifest()
        _check_manifest(manifest, root_config)
        manifest.build_flat_graph()
        return manifest
def __init__(self, args, config):
    """Set up the snapshot task; warn when invoked through the
    deprecated ``archive`` subcommand."""
    super(SnapshotTask, self).__init__(args, config)
    was_archive_invocation = args.which == 'archive'
    if was_archive_invocation:
        warn('archives')
def __len__(self):
    """Report the number of dataclass fields; dict-style use is deprecated."""
    deprecations.warn('not-a-dictionary', obj=self)
    return len(fields(type(self)))
def __iter__(self):
    """Yield field names, dict-style; this access pattern is deprecated."""
    deprecations.warn('not-a-dictionary', obj=self)
    for _unused, field_name in self._get_fields():
        yield field_name
def inner(*args, **kwargs):
    """Warn that this adapter method is deprecated, then delegate."""
    deprecation_name = 'adapter:{}'.format(func_name)
    warn(deprecation_name)
    return func(*args, **kwargs)
def _warn_for_deprecated_configs(manifest): for unique_id, node in manifest.nodes.items(): is_model = node.resource_type == NodeType.Model if is_model and 'sql_where' in node.config: deprecations.warn('sql_where')