Code example #1
    def test_multiple_blocks(self):
        raw_sql = '{% macro foo(a, b) %}a ~ b{% endmacro %}\n{% macro bar(c, d) %}c + d{% endmacro %}'
        block = self.file_block_for(raw_sql, 'macro.sql')
        self.parser.parse_file(block)
        self.assert_has_results_length(self.parser.results, macros=2)
        macros = sorted(self.parser.results.macros.values(),
                        key=lambda m: m.name)
        expected_bar = ParsedMacro(
            name='bar',
            resource_type=NodeType.Macro,
            unique_id='macro.snowplow.bar',
            package_name='snowplow',
            original_file_path=normalize('macros/macro.sql'),
            root_path=get_abs_os_path('./dbt_modules/snowplow'),
            path=normalize('macros/macro.sql'),
            macro_sql='{% macro bar(c, d) %}c + d{% endmacro %}',
        )
        expected_foo = ParsedMacro(
            name='foo',
            resource_type=NodeType.Macro,
            unique_id='macro.snowplow.foo',
            package_name='snowplow',
            original_file_path=normalize('macros/macro.sql'),
            root_path=get_abs_os_path('./dbt_modules/snowplow'),
            path=normalize('macros/macro.sql'),
            macro_sql='{% macro foo(a, b) %}a ~ b{% endmacro %}',
        )
        self.assertEqual(macros, [expected_bar, expected_foo])
        path = get_abs_os_path('./dbt_modules/snowplow/macros/macro.sql')
        self.assertIn(path, self.parser.results.files)
        self.assertEqual(
            sorted(self.parser.results.files[path].macros),
            ['macro.snowplow.bar', 'macro.snowplow.foo'],
        )
Code example #2
def generate_name_macros(package):
    from dbt.contracts.graph.parsed import ParsedMacro
    from dbt.node_types import NodeType
    name_sql = {}
    for component in ('database', 'schema', 'alias'):
        if component == 'alias':
            source = 'node.name'
        else:
            source = f'target.{component}'
        name = f'generate_{component}_name'
        sql = f'{{% macro {name}(value, node) %}} {{% if value %}} {{{{ value }}}} {{% else %}} {{{{ {source} }}}} {{% endif %}} {{% endmacro %}}'
        name_sql[name] = sql

    all_sql = '\n'.join(name_sql.values())
    for name, sql in name_sql.items():
        pm = ParsedMacro(
            name=name,
            resource_type=NodeType.Macro,
            unique_id=f'macro.{package}.{name}',
            package_name=package,
            original_file_path=normalize('macros/macro.sql'),
            root_path='./dbt_modules/root',
            path=normalize('macros/macro.sql'),
            raw_sql=all_sql,
            macro_sql=sql,
        )
        yield pm
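
The generator above yields one ParsedMacro per name component. A minimal sketch (not taken from any of these projects) of how it might be consumed, assuming the helper is importable in a dbt unit-test environment; the package name 'root' and the macros_by_id dict are illustrative:

# Illustrative only: collect the generated ParsedMacro objects by unique_id.
macros_by_id = {pm.unique_id: pm for pm in generate_name_macros('root')}
# With package='root', the unique_ids are macro.root.generate_database_name,
# macro.root.generate_schema_name and macro.root.generate_alias_name.
assert 'macro.root.generate_schema_name' in macros_by_id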
Code example #3
    def parse_macro_file(self,
                         macro_file_path,
                         macro_file_contents,
                         root_path,
                         package_name,
                         resource_type,
                         tags=None,
                         context=None):

        logger.debug("Parsing {}".format(macro_file_path))

        to_return = {}

        if tags is None:
            tags = []

        # change these to actual kwargs
        base_node = UnparsedMacro(
            path=macro_file_path,
            original_file_path=macro_file_path,
            package_name=package_name,
            raw_sql=macro_file_contents,
            root_path=root_path,
        )

        try:
            ast = dbt.clients.jinja.parse(macro_file_contents)
        except dbt.exceptions.CompilationException as e:
            e.node = base_node
            raise e

        for macro_node in ast.find_all(jinja2.nodes.Macro):
            macro_name = macro_node.name

            # only names carrying the dbt macro prefix become Macro nodes; any
            # other jinja macro keeps node_type=None and is skipped below
            node_type = None
            if macro_name.startswith(dbt.utils.MACRO_PREFIX):
                node_type = NodeType.Macro
                name = macro_name.replace(dbt.utils.MACRO_PREFIX, '')

            if node_type != resource_type:
                continue

            unique_id = self.get_path(resource_type, package_name, name)

            merged = dbt.utils.deep_merge(
                base_node.serialize(), {
                    'name': name,
                    'unique_id': unique_id,
                    'tags': tags,
                    'resource_type': resource_type,
                    'depends_on': {
                        'macros': []
                    },
                })

            new_node = ParsedMacro(**merged)

            to_return[unique_id] = new_node

        return to_return
Code example #4
    def parse_macro(self, base_node: UnparsedMacro, name: str) -> ParsedMacro:
        unique_id = self.generate_unique_id(name)

        return ParsedMacro(
            path=base_node.path,
            original_file_path=base_node.original_file_path,
            package_name=base_node.package_name,
            raw_sql=base_node.raw_sql,
            root_path=base_node.root_path,
            resource_type=base_node.resource_type,
            name=name,
            unique_id=unique_id,
        )
Code example #5
File: macros.py Project: drernie/dbt_salesforce
    def parse_macro(self, block: jinja.BlockTag, base_node: UnparsedMacro,
                    name: str) -> ParsedMacro:
        unique_id = self.generate_unique_id(name)

        return ParsedMacro(
            path=base_node.path,
            macro_sql=block.full_block,
            original_file_path=base_node.original_file_path,
            package_name=base_node.package_name,
            root_path=base_node.root_path,
            resource_type=base_node.resource_type,
            name=name,
            unique_id=unique_id,
        )
Code example #6
File: test_parser.py Project: convoyinc/dbt
    def test_single_block(self):
        raw_sql = '{% macro foo(a, b) %}a ~ b{% endmacro %}'
        block = self.file_block_for(raw_sql, 'macro.sql')
        self.parser.parse_file(block)
        self.assert_has_results_length(self.parser.results, macros=1)
        macro = list(self.parser.results.macros.values())[0]
        expected = ParsedMacro(
            name='foo',
            resource_type=NodeType.Macro,
            unique_id='macro.snowplow.foo',
            package_name='snowplow',
            original_file_path=normalize('macros/macro.sql'),
            root_path=get_abs_os_path('./dbt_modules/snowplow'),
            path=normalize('macros/macro.sql'),
            raw_sql=raw_sql,
        )
        self.assertEqual(macro, expected)
        path = get_abs_os_path('./dbt_modules/snowplow/macros/macro.sql')
        self.assertIn(path, self.parser.results.files)
        self.assertEqual(self.parser.results.files[path].macros,
                         ['macro.snowplow.foo'])
Code example #7
    def _generate_macros(self):
        name_sql = {}
        for component in ('database', 'schema', 'alias'):
            if component == 'alias':
                source = 'node.name'
            else:
                source = f'target.{component}'
            name = f'generate_{component}_name'
            sql = f'{{% macro {name}(value, node) %}} {{% if value %}} {{{{ value }}}} {{% else %}} {{{{ {source} }}}} {{% endif %}} {{% endmacro %}}'
            name_sql[name] = sql

        for name, sql in name_sql.items():
            pm = ParsedMacro(
                name=name,
                resource_type=NodeType.Macro,
                unique_id=f'macro.root.{name}',
                package_name='root',
                original_file_path=normalize('macros/macro.sql'),
                root_path=get_abs_os_path('./dbt_modules/root'),
                path=normalize('macros/macro.sql'),
                macro_sql=sql,
            )
            yield pm
Code example #8
File: manifest.py Project: drernie/dbt_salesforce
def _process_docs_for_macro(context: Dict[str, Any],
                            macro: ParsedMacro) -> None:
    macro.description = get_rendered(macro.description, context)
    for arg in macro.arguments:
        arg.description = get_rendered(arg.description, context)
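
_process_docs_for_macro renders the macro description and each argument description through the docs rendering context. A hedged sketch of how such a helper might be driven over a collection of parsed macros; process_macro_docs, doc_context and macros_by_id are illustrative names, not identifiers taken from the source above:

# Illustrative only: apply the helper above to every ParsedMacro in a dict
# keyed by unique_id, using whatever docs rendering context is in scope.
def process_macro_docs(doc_context, macros_by_id):
    for macro in macros_by_id.values():
        _process_docs_for_macro(doc_context, macro)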
Code example #9
File: macros.py Project: vishalbelsare/dbt
    def parse_macro_file(cls,
                         macro_file_path,
                         macro_file_contents,
                         root_path,
                         package_name,
                         resource_type,
                         tags=None,
                         context=None):

        logger.debug("Parsing {}".format(macro_file_path))

        to_return = {}

        if tags is None:
            tags = []

        # note: the context parameter is discarded here; the template is always
        # rendered with an empty context
        context = {}

        # change these to actual kwargs
        base_node = UnparsedMacro(
            path=macro_file_path,
            original_file_path=macro_file_path,
            package_name=package_name,
            raw_sql=macro_file_contents,
            root_path=root_path,
        )

        try:
            template = dbt.clients.jinja.get_template(macro_file_contents,
                                                      context,
                                                      node=base_node)
        except dbt.exceptions.CompilationException as e:
            e.node = base_node
            raise e

        for key, item in template.module.__dict__.items():
            if type(item) != jinja2.runtime.Macro:
                continue

            node_type = None
            if key.startswith(dbt.utils.MACRO_PREFIX):
                node_type = NodeType.Macro
                name = key.replace(dbt.utils.MACRO_PREFIX, '')

            elif key.startswith(dbt.utils.OPERATION_PREFIX):
                node_type = NodeType.Operation
                name = key.replace(dbt.utils.OPERATION_PREFIX, '')

            if node_type != resource_type:
                continue

            unique_id = cls.get_path(resource_type, package_name, name)

            merged = dbt.utils.deep_merge(
                base_node.serialize(), {
                    'name': name,
                    'unique_id': unique_id,
                    'tags': tags,
                    'resource_type': resource_type,
                    'depends_on': {
                        'macros': []
                    },
                })

            new_node = ParsedMacro(template=template, **merged)

            to_return[unique_id] = new_node

        return to_return