Example #1
0
    def test_multiple_raw_blocks(self):
        """Each raw-wrapped docs block in the file yields its own
        ParsedDocumentation with the raw fences preserved in its contents.
        """
        parser = docs.DocumentationParser(
            results=ParseResult.rpc(),
            root_project=self.root_project_config,
            project=self.subdir_project_config,
            macro_manifest=Manifest.from_macros())

        parser.parse_file(
            self._build_file(MULTIPLE_RAW_BLOCKS, 'test_file.md'))

        docs_found = sorted(
            parser.results.docs.values(), key=lambda doc: doc.name)
        self.assertEqual(len(docs_found), 2)

        # Shared metadata should be identical across both parsed docs.
        for doc in docs_found:
            self.assertIsInstance(doc, ParsedDocumentation)
            self.assertEqual(doc.package_name, 'some_package')
            self.assertEqual(doc.original_file_path, self.testfile_path)
            self.assertEqual(doc.root_path, self.subdir_path)
            self.assertEqual(doc.resource_type, NodeType.Documentation)
            self.assertEqual(doc.path, 'test_file.md')

        other_doc, some_doc = docs_found
        self.assertEqual(other_doc.name, 'other_doc')
        self.assertEqual(
            other_doc.block_contents,
            '```\n    {% docs %}other doc{% enddocs %}\n    ```')
        self.assertEqual(some_doc.name, 'some_doc')
        self.assertEqual(
            some_doc.block_contents,
            '```\n    {% docs %}some doc{% enddocs %}\n    ```')
Example #2
0
    def test_load_file(self):
        """Parsing a docs file produces one ParsedDocumentation per docs
        block, each carrying only its own block's file contents.
        """
        parser = docs.DocumentationParser(
            results=ParseResult.rpc(),
            root_project=self.root_project_config,
            project=self.subdir_project_config,
            macro_manifest=Manifest.from_macros())

        parser.parse_file(
            self._build_file(TEST_DOCUMENTATION_FILE, 'test_file.md'))

        docs_found = sorted(
            parser.results.docs.values(), key=lambda doc: doc.name)
        self.assertEqual(len(docs_found), 2)

        # Every parsed doc shares the same file-level metadata, but none
        # should hold the whole source file as its contents.
        for doc in docs_found:
            self.assertIsInstance(doc, ParsedDocumentation)
            self.assertEqual(doc.package_name, 'some_package')
            self.assertNotEqual(doc.file_contents, TEST_DOCUMENTATION_FILE)
            self.assertEqual(doc.original_file_path, self.testfile_path)
            self.assertEqual(doc.root_path, self.subdir_path)
            self.assertEqual(doc.resource_type, NodeType.Documentation)
            self.assertEqual(doc.path, 'test_file.md')

        sessions_doc, session_id_doc = docs_found
        self.assertEqual(sessions_doc.name, 'snowplow_sessions')
        self.assertEqual(sessions_doc.file_contents, SNOWPLOW_SESSIONS_BLOCK)
        self.assertEqual(
            session_id_doc.name, 'snowplow_sessions__session_id')
        self.assertEqual(
            session_id_doc.file_contents, SNOWPLOW_SESSIONS_SESSION_ID_BLOCK)
Example #3
0
 def load_only_macros(self) -> Manifest:
     """Load macros from the cached parse results and return a manifest
     containing only those macros (and their files).
     """
     cached = self.read_parse_results()
     self._load_macros(cached, internal_manifest=None)
     # A macros-only manifest is sufficient to build the rendering context.
     return Manifest.from_macros(
         macros=self.results.macros,
         files=self.results.files,
     )
Example #4
0
    def setUp(self):
        """Build two project configs ('root' and 'snowplow') that share one
        redshift profile, wire them as mutual dependencies, patch out the
        adapter lookups, and seed a macros-only manifest.
        """
        dbt.flags.STRICT_MODE = True
        dbt.flags.WARN_ERROR = True
        self.maxDiff = None

        profile = {
            'target': 'test',
            'quoting': {},
            'outputs': {
                'test': {
                    'type': 'redshift',
                    'host': 'localhost',
                    'schema': 'analytics',
                    'user': '******',
                    'pass': '******',
                    'dbname': 'test',
                    'port': 1,
                },
            },
        }

        self.root_project_config = config_from_parts_or_dicts(
            project={
                'name': 'root',
                'version': '0.1',
                'profile': 'test',
                'project-root': normalize('/usr/src/app'),
            },
            profile=profile,
            cli_vars='{"test_schema_name": "foo"}',
        )

        self.snowplow_project_config = config_from_parts_or_dicts(
            project={
                'name': 'snowplow',
                'version': '0.1',
                'profile': 'test',
                'project-root': get_abs_os_path('./dbt_modules/snowplow'),
            },
            profile=profile,
        )

        self.all_projects = {
            'root': self.root_project_config,
            'snowplow': self.snowplow_project_config,
        }
        # Every config can resolve every project as a dependency.
        self.root_project_config.dependencies = self.all_projects
        self.snowplow_project_config.dependencies = self.all_projects

        # Stub adapter lookups in both the context and parser layers.
        self.patcher = mock.patch('dbt.context.providers.get_adapter')
        self.factory = self.patcher.start()
        self.parser_patcher = mock.patch('dbt.parser.base.get_adapter')
        self.factory_parser = self.parser_patcher.start()

        self.macro_manifest = Manifest.from_macros(
            macros={m.unique_id: m for m in generate_name_macros('root')})
Example #5
0
 def __init__(
     self,
     results: ParseResult,
     root_project: RuntimeConfig,
 ) -> None:
     """Store the parse results and root project, and derive a manifest
     holding only the macros parsed so far.
     """
     self.results = results
     self.root_project = root_project
     # Macros-only manifest built from the current parse results.
     self.macro_manifest = Manifest.from_macros(
         macros=self.results.macros,
         files=self.results.files,
     )
     # Schema parsers keyed by name, filled in later.
     self.schema_parsers: Dict[str, SchemaParser] = {}
     # Tracks which patch names have been consumed per source key.
     self.patches_used: Dict[SourceKey, Set[str]] = {}
     self.sources: Dict[str, ParsedSourceDefinition] = {}
Example #6
0
    def load(self, internal_manifest: Optional[Manifest] = None):
        """Load macros (optionally reusing a cached parse result), publish a
        macros-only manifest via the macro hook, then parse every project.
        """
        cached = self.read_parse_results()
        if cached is not None:
            logger.debug('Got an acceptable cached parse result')
        self._load_macros(cached, internal_manifest=internal_manifest)
        # A manifest with just the macros gives the parsers their context.
        macro_manifest = Manifest.from_macros(
            macros=self.results.macros,
            files=self.results.files,
        )
        self.macro_hook(macro_manifest)

        # Parse one project at a time.
        for project in self.all_projects.values():
            self.parse_project(project, macro_manifest, cached)
Example #7
0
    def load_only_macros(self) -> Manifest:
        """Run only the macro parser over every project and return a
        manifest containing just the resulting macros.
        """
        cached = self.read_parse_results()

        # Parse each project's macro files, consulting the cache per path.
        for project in self.all_projects.values():
            macro_parser = MacroParser(self.results, project)
            for path in macro_parser.search():
                self.parse_with_cache(path, macro_parser, cached)

        # A macros-only manifest is enough to build the rendering context.
        macro_manifest = Manifest.from_macros(macros=self.results.macros,
                                              files=self.results.files)
        self.macro_hook(macro_manifest)
        return macro_manifest
Example #8
0
    def test_load_file_extras(self):
        """A non-docs block (here, a model block) appended to a docs file is
        ignored by the documentation parser: only docs blocks are extracted.
        """
        # Bug fix: the concatenation below was previously an unassigned,
        # discarded expression, and the file was built from the unmodified
        # TEST_DOCUMENTATION_FILE — so the "extras" this test is named for
        # were never actually parsed. Use the combined contents.
        file_contents = (
            TEST_DOCUMENTATION_FILE +
            '{% model foo %}select 1 as id{% endmodel %}'
        )

        parser = docs.DocumentationParser(
            results=ParseResult.rpc(),
            root_project=self.root_project_config,
            project=self.subdir_project_config,
            macro_manifest=Manifest.from_macros())

        file_block = self._build_file(file_contents, 'test_file.md')

        parser.parse_file(file_block)
        results = sorted(parser.results.docs.values(), key=lambda n: n.name)
        # Only the two docs blocks are extracted; the model block is skipped.
        self.assertEqual(len(results), 2)
        for result in results:
            self.assertIsInstance(result, ParsedDocumentation)
        self.assertEqual(results[0].name, 'snowplow_sessions')
        self.assertEqual(results[1].name, 'snowplow_sessions__session_id')
Example #9
0
    def setUp(self):
        """Patch every filesystem-, network-, and adapter-touching seam so
        manifest compilation can run fully in-memory: graph pickling, project
        loading, file searching, hook parsing, adapter lookup, parse-result
        creation, and source-file loading are all replaced with mocks driven
        by ``self.mock_models``.
        """
        dbt.flags.STRICT_MODE = True
        self.graph_result = None

        # Patchers for every external seam; each is .start()ed below and the
        # resulting mocks are kept on self for per-test inspection.
        self.write_gpickle_patcher = patch('networkx.write_gpickle')
        self.load_projects_patcher = patch(
            'dbt.parser.manifest._load_projects')
        self.file_system_patcher = patch.object(
            dbt.parser.search.FilesystemSearcher, '__new__')
        self.hook_patcher = patch.object(dbt.parser.hooks.HookParser,
                                         '__new__')
        self.get_adapter_patcher = patch('dbt.context.providers.get_adapter')
        self.factory = self.get_adapter_patcher.start()
        # also patch this one

        self.get_adapter_patcher_parser = patch('dbt.parser.base.get_adapter')
        self.factory_cmn = self.get_adapter_patcher_parser.start()

        # Capture the graph that would have been pickled to disk instead of
        # writing it, so tests can assert on self.graph_result.
        def mock_write_gpickle(graph, outfile):
            self.graph_result = graph

        self.mock_write_gpickle = self.write_gpickle_patcher.start()
        self.mock_write_gpickle.side_effect = mock_write_gpickle

        # Minimal postgres profile; the host is deliberately unresolvable so
        # nothing can accidentally connect.
        self.profile = {
            'outputs': {
                'test': {
                    'type': 'postgres',
                    'threads': 4,
                    'host': 'thishostshouldnotexist',
                    'port': 5432,
                    'user': '******',
                    'pass': '******',
                    'dbname': 'dbt',
                    'schema': 'dbt_test'
                }
            },
            'target': 'test'
        }

        self.mock_load_projects = self.load_projects_patcher.start()

        # Project loading yields only the config it was given — no disk scan.
        def _load_projects(config, paths):
            yield config.project_name, config

        self.mock_load_projects.side_effect = _load_projects

        # Tests populate this with fake source files; the filesystem and
        # file-loading mocks below serve their contents.
        self.mock_models = []

        # Fresh ParseResult with deterministic hashes so cache comparisons
        # behave predictably.
        def _mock_parse_result(config, all_projects):
            return ParseResult(
                vars_hash=FileHash.from_contents('vars'),
                project_hashes={
                    name: FileHash.from_contents(name)
                    for name in all_projects
                },
                profile_hash=FileHash.from_contents('profile'),
            )

        self.load_patch = patch('dbt.parser.manifest.make_parse_result')
        self.mock_parse_result = self.load_patch.start()
        self.mock_parse_result.side_effect = _mock_parse_result

        # load_file returns the mock model whose path matches; raises
        # IndexError if no mock model has that path.
        self.load_source_file_patcher = patch.object(BaseParser, 'load_file')
        self.mock_source_file = self.load_source_file_patcher.start()
        self.mock_source_file.side_effect = lambda path: [
            n for n in self.mock_models if n.path == path
        ][0]

        # self.relation_update_patcher = patch.object(RelationUpdate, '_relation_components', lambda: [])
        # self.mock_relation_update = self.relation_update_patcher.start()
        self.internal_manifest = Manifest.from_macros(macros={
            n.unique_id: n
            for n in generate_name_macros('test_models_compile')
        })

        # Filesystem search only "finds" the mock models, and only for .sql
        # searches under the models directories.
        def filesystem_iter(iter_self):
            if 'sql' not in iter_self.extension:
                return []
            if 'models' not in iter_self.relative_dirs:
                return []
            return [model.path for model in self.mock_models]

        def create_filesystem_searcher(cls, project, relative_dirs, extension):
            result = MagicMock(project=project,
                               relative_dirs=relative_dirs,
                               extension=extension)
            result.__iter__.side_effect = lambda: iter(filesystem_iter(result))
            return result

        # Hook parsers iterate nothing — no on-run-start/end hooks in tests.
        def create_hook_patcher(cls, results, project, relative_dirs,
                                extension):
            result = MagicMock(results=results,
                               project=project,
                               relative_dirs=relative_dirs,
                               extension=extension)
            result.__iter__.side_effect = lambda: iter([])
            return result

        self.mock_filesystem_constructor = self.file_system_patcher.start()
        self.mock_filesystem_constructor.side_effect = create_filesystem_searcher
        self.mock_hook_constructor = self.hook_patcher.start()
        self.mock_hook_constructor.side_effect = create_hook_patcher