Example #1
0
    def apply(self, analysis):
        """Run this project's named query block inside the results directory."""
        # Skip projects that were never loaded into the analysis
        if self._project not in analysis.loaded_projects:
            return

        # Connector bound to the analysis database workspace
        connector = blox.connect.Connector(analysis.database_directory)

        # Results land in a freshly created directory
        results_dir = analysis.results_directory
        os.makedirs(results_dir)

        # Query blocks follow the '<project>-queries' naming scheme
        block = '{}-queries'.format(self._project.name)
        _logger.info("Executing named block %s", block)

        # Run the block from within the results directory so outputs land there
        with cd(results_dir):
            connector.execute_block(block)
Example #2
0
    def apply(self, analysis):
        """Load the fact files into the database via the schema/import projects."""
        workspace = analysis.database_directory
        facts = analysis.facts_directory

        _logger.info("Loading data from %s ...", facts)

        # Both helper projects must be unpacked before the loader can run;
        # the script is executed from the output directory so that relative
        # fact paths resolve correctly.
        with unpacked_project('schema') as schema_dir:
            with unpacked_project('import') as import_dir:
                with cd(analysis.output_directory):
                    # Execute script while ignoring output
                    loader = blox.LoadSchemaScript(
                        workspace=workspace,
                        script_path=files.mktemp(suffix='.lb'),
                        schema_path=schema_dir,
                        import_path=import_dir)
                    loader.run()

        _logger.info("Stored database in %s", workspace)
    def apply(self, analysis):
        """Execute the project's query block, writing results to a new directory."""
        # Nothing to do unless this project has been loaded
        if self._project not in analysis.loaded_projects:
            return

        # Connect to the analysis database
        connector = blox.connect.Connector(analysis.database_directory)

        # Create the (previously non-existent) results directory
        target = analysis.results_directory
        os.makedirs(target)

        # The named block to execute is derived from the project name
        query_block = '{}-queries'.format(self._project.name)
        _logger.info("Executing named block %s", query_block)

        # Temporarily switch into the results directory for execution
        with cd(target):
            connector.execute_block(query_block)
    def apply(self, analysis):
        """Create and populate the database from the analysis fact directory."""
        db_path = analysis.database_directory
        fact_path = analysis.facts_directory

        _logger.info("Loading data from %s ...", fact_path)

        # Unpack the required 'schema' and 'import' projects, then run the
        # loader script from the output directory so fact loading resolves
        # its paths; the script's output is discarded.
        with unpacked_project('schema') as schema_path:
            with unpacked_project('import') as import_path:
                with cd(analysis.output_directory):
                    script = blox.LoadSchemaScript(
                        workspace=db_path,
                        script_path=files.mktemp(suffix='.lb'),
                        schema_path=schema_path,
                        import_path=import_path)
                    script.run()

        _logger.info("Stored database in %s", db_path)
Example #5
0
    def apply(self, analysis):
        """Export the database to JSON: dump CSV files, then run the collector."""
        # Imported lazily to avoid a hard dependency at module load time
        from .collect.json_collector import JSONCollector

        # Make the JSON export logic available to the analysis
        analysis.load_project('json-export')

        # Connect to the analysis database
        connector = blox.connect.Connector(analysis.database_directory)

        # CSV files are staged in a fresh temporary directory
        staging = runtime.FileManager().mkdtemp()
        _logger.info("Exporting CSV files to prepare JSON export")

        # Execute the export block from inside the staging directory
        with cd(staging):
            connector.execute_block('json-export')

        _logger.info("CSV files exported")
        _logger.info("Running collector")

        # The collector turns the staged CSV files into the JSON output
        collector = JSONCollector(analysis)
        collector.run(staging)
    def apply(self, analysis):
        """Run the JSON export pipeline (CSV dump followed by collection)."""
        from .collect.json_collector import JSONCollector  # deferred import

        # Ensure the export logic module is loaded
        analysis.load_project('json-export')

        db_connector = blox.connect.Connector(analysis.database_directory)

        # Temporary directory that receives the intermediate CSV files
        csv_dir = runtime.FileManager().mkdtemp()
        _logger.info("Exporting CSV files to prepare JSON export")

        # Run the export block with the temporary directory as CWD
        with cd(csv_dir):
            db_connector.execute_block('json-export')

        _logger.info("CSV files exported")
        _logger.info("Running collector")

        # Collect the staged CSV files into the final JSON form
        json_collector = JSONCollector(analysis)
        json_collector.run(csv_dir)