    def StopCoverage(self, output_stream):
        if not self._dirs:
            return 0

        # Consolidate coverage data (.gcda files) into the output directory
        output_dir = os.path.dirname(self._coverage_filename)

        for filename in FileSystem.WalkFiles(
                output_dir,
                include_file_extensions=[".gcda"],
        ):
            dest_filename = os.path.join(output_dir,
                                         os.path.basename(filename))
            if dest_filename == filename:
                continue

            if not os.path.isfile(dest_filename):
                shutil.copyfile(filename, dest_filename)

        return Process.Execute(
            '{script} Lcov {dirs} "/output_dir={output}"'.format(
                script=CurrentShell.CreateScriptName("ExtractCoverageInfo"),
                dirs=" ".join(
                    ['"/bin_dir={}"'.format(dir) for dir in self._dirs]),
                output=output_dir,
            ),
            output_stream,
        )
Example #2
def _GetBuildInfos(root_dir, output_stream):
    root_dir = os.path.realpath(root_dir)

    build_infos = []

    output_stream.write("\nSearching for build files...")
    with output_stream.DoneManager( done_suffix=lambda: "{} found".format(inflect.no("build file", len(build_infos))),
                                  ):
        name, ext = os.path.splitext(BUILD_FILENAME)

        for fullpath in FileSystem.WalkFiles( root_dir,
                                              include_file_base_names=[ name, ],
                                              include_file_extensions=[ ext, ],
                                            ):
            if os.path.exists(os.path.join(os.path.dirname(fullpath), BUILD_FILENAME_IGNORE)):
                continue

            build_infos.append(_BuildInfo( fullpath,
                                           Configuration.FromBuildFile( fullpath, 
                                                                        strip_path=root_dir,
                                                                      ),
                                         ))

        build_infos.sort(key=lambda item: item.configuration.Priority)

    return build_infos
Example #3
def Normalize( script_filename_or_dir,
               output_stream=sys.stdout,
             ):
    """Normalizes a script so that it can be run from any location."""

    with StreamDecorator(output_stream).DoneManager( line_prefix='',
                                                     prefix="\nResults: ",
                                                     suffix='\n',
                                                   ) as dm:
        if os.path.isfile(script_filename_or_dir):
            script_filenames = [ script_filename_or_dir, ]
        elif os.path.isdir(script_filename_or_dir):
            script_filenames = list(FileSystem.WalkFiles(script_filename_or_dir, recurse=False))
        else:
            assert False

        for index, script_filename in enumerate(script_filenames):
            nonlocals = CommonEnvironment.Nonlocals(result=None)

            dm.stream.write("Processing '{}' ({} of {})...".format( script_filename,
                                                                    index + 1,
                                                                    len(script_filenames),
                                                                  ))
            with dm.stream.DoneManager( done_suffix=lambda: PythonActivationActivity.NormalizeScriptResultStrings[nonlocals.result],
                                      ):
                nonlocals.result = PythonActivationActivity.NormalizeScript(script_filename)
        
        return dm.result
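
# ----------------------------------------------------------------------
# Illustrative usage sketch (not part of the original example): Normalize accepts either
# a single script filename or a directory and returns a process-style exit code. It
# assumes Normalize and sys are available as above; the directory below is hypothetical.
def _ExampleNormalizeUsage(scripts_dir="/path/to/generated/Scripts"):
    return Normalize(scripts_dir, output_stream=sys.stdout)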
Example #4
    def GetRelativeFiles(dir):
        output_stream.write("Processing files in '{}'...".format(dir))
        with output_stream.DoneManager():
            results = {}

            for filename in FileSystem.WalkFiles(dir):
                assert filename.startswith(dir), (filename, dir)
                results[FileSystem.TrimPath(filename, dir)] = _CalculateHash(filename)

        return results
Example #5
def _CopyHeaders(temp_directory, output_dir, output_stream):
    output_files = []

    output_files += list(
        FileSystem.WalkFiles(
            _script_dir,
            include_file_extensions=[".h"],
            include_file_base_names=[
                lambda basename: basename.startswith("SharedLibrary_")
            ],
            recurse=False,
        ),
    )

    output_files += list(
        FileSystem.WalkFiles(
            os.path.join(_script_dir, "GeneratedCode"),
            include_file_extensions=[".h"],
            include_file_base_names=[
                lambda basename: basename.startswith("SharedLibrary_")
            ],
            exclude_file_names=["SharedLibrary_PointerTable.h"],
            recurse=False,
        ),
    )

    for index, output_file in enumerate(output_files):
        output_stream.write(
            "Copying '{}' ({} of {})...".format(output_file, index + 1, len(output_files)),
        )
        with output_stream.DoneManager():
            shutil.copyfile(
                output_file,
                os.path.join(output_dir, os.path.basename(output_file)),
            )

    return 0
Example #6
    def GetFiles():
        return FileSystem.WalkFiles(
            os.getcwd(),
            recurse=False,
        )
Example #7
    def GetFiles():
        return FileSystem.WalkFiles(
            repo_root,
            traverse_exclude_dir_names=cls.WorkingDirectories,
        )
fundamental_repo = os.getenv("DEVELOPMENT_ENVIRONMENT_FUNDAMENTAL")
assert os.path.isdir(fundamental_repo), fundamental_repo

sys.path.insert(0, fundamental_repo)
from RepositoryBootstrap import *  # <Unused import> pylint: disable = W0614
from RepositoryBootstrap.SetupAndActivate import CurrentShell  # <Unused import> pylint: disable = W0614
from RepositoryBootstrap.SetupAndActivate.Configuration import *  # <Unused import> pylint: disable = W0614

del sys.path[0]

# ----------------------------------------------------------------------
source_dir = os.path.join(_script_dir, "Tools", "cmake", "v3.13.4",
                          "customizations")
assert os.path.isdir(source_dir), source_dir

dest_dir = os.path.join(
    _script_dir,
    "Tools",
    "cmake",
    "v3.13.4",
    CurrentShell.CategoryName,
    os.getenv("DEVELOPMENT_ENVIRONMENT_ENVIRONMENT_NAME"),
    "share",
    "cmake-3.13",
    "Modules",
)

for filename in FileSystem.WalkFiles(source_dir):
    shutil.copyfile(filename, os.path.join(dest_dir,
                                           os.path.basename(filename)))
def Execute():
    """\
    Uses cog (https://nedbatchelder.com/code/cog/) to update vscode's launch.json file.

    Example:

        Within 'launch.json':

            // [[[cog from CommonEnvironmentEx import VsCodeCogger; VsCodeCogger.Execute() ]]]
            // [[[end]]]

        From the command line:

            cog -r "<launch.json filename>"

    """

    # Get the files
    cog_filename = os.path.realpath(cog.inFile)
    assert os.path.isfile(cog_filename), cog_filename

    dirname = os.path.realpath(os.path.join(os.path.dirname(cog_filename), ".."))
    assert os.path.isdir(dirname), dirname

    filenames = FileSystem.WalkFiles(
        dirname,
        include_dir_names=lambda name: name.endswith("Tests") and name != "Tests",
        include_file_extensions=".py",
        exclude_file_names="__init__.py",
    )

    # Organize the files
    groups = OrderedDict()
    test_names_ctr = {}

    for filename in filenames:
        test_name = os.path.basename(filename)

        if test_name in test_names_ctr:
            test_names_ctr[test_name] += 1
        else:
            test_names_ctr[test_name] = 1

        assert filename.startswith(dirname), (filename, dirname)
        group = os.path.dirname(FileSystem.TrimPath(filename, dirname)).replace(os.path.sep, "/")

        groups.setdefault(group, []).append(filename)

    if not groups:
        return

    # Load the test parsers
    dynamic_test_parser_filename = os.getenv("DEVELOPMENT_ENVIRONMENT_TEST_PARSERS")
    assert os.path.isfile(dynamic_test_parser_filename), dynamic_test_parser_filename

    with open(dynamic_test_parser_filename) as f:
        test_parser_filenames = f.readlines()

    test_parsers = []

    for test_parser_filename in test_parser_filenames:
        test_parser_filename = test_parser_filename.strip()
        if not test_parser_filename:
            continue

        assert test_parser_filename, test_parser_filename
        assert os.path.isfile(test_parser_filename), test_parser_filename

        dirname, basename = os.path.split(test_parser_filename)
        basename = os.path.splitext(basename)[0]

        sys.path.insert(0, dirname)
        with CallOnExit(lambda: sys.path.pop(0)):
            mod = importlib.import_module(basename)

            parser = getattr(mod, "TestParser", None)
            assert parser is not None, test_parser_filename

            assert parser.Name in _CONFIGURATIONS, parser.Name

            test_parsers.append(parser)

    # Write the output
    cog.out(
        textwrap.dedent(
            """\

            // ----------------------------------------------------------------------
            // |
            // |  Cog Output
            // |
            // ----------------------------------------------------------------------

            // To regenerate this content:
            //
            //    cog -r "{}"

            // ----------------------------------------------------------------------
            // ----------------------------------------------------------------------
            // ----------------------------------------------------------------------

            """,
        ).format(cog.inFile.replace("\\", "\\\\")),
    )

    for group, filenames in groups.items():
        cog.out(
            textwrap.dedent(
                """\
                // ----------------------------------------------------------------------
                // |
                // |  {}
                // |
                // ----------------------------------------------------------------------
                """,
            ).format(group),
        )

        for filename in filenames:
            for parser in test_parsers:
                if parser.IsSupportedTestItem(filename):
                    dirname, basename = os.path.split(filename)

                    cog.out(
                        _CONFIGURATIONS[parser.Name].format(
                            filename=filename.replace(os.path.sep, "/"),
                            dirname=dirname.replace(os.path.sep, "/"),
                            basename=basename,
                            group=group,
                            name="{}{}".format(
                                os.path.splitext(basename)[0],
                                "" if test_names_ctr[basename] == 1 else " --- {}".format(group),
                            ),
                        ),
                    )

                    break

        cog.out("\n")
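
# ----------------------------------------------------------------------
# Illustrative sketch (not part of the original example) of the display-name logic used
# above: when the same test basename appears under more than one group, the group is
# appended so that the generated launch.json entries remain unique. The arguments shown
# in the trailing comment are hypothetical.
def _ExampleDisplayName(basename, group, test_names_ctr):
    return "{}{}".format(
        os.path.splitext(basename)[0],
        "" if test_names_ctr[basename] == 1 else " --- {}".format(group),
    )

# _ExampleDisplayName("Foo_UnitTest.py", "Lib/A/UnitTests", {"Foo_UnitTest.py": 2})
#     -> "Foo_UnitTest --- Lib/A/UnitTests"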
Example #10
def EntryPoint(
    code_dir_or_doxygen_filename,
    output_dir,
    output_stream=sys.stdout,
    verbose=False,
):
    with StreamDecorator(output_stream).DoneManager(
            line_prefix="",
            prefix="\nResults: ",
            suffix="\n",
    ) as dm:
        # Get the doxygen files
        doxygen_files = []

        if os.path.isfile(code_dir_or_doxygen_filename):
            doxygen_files.append(code_dir_or_doxygen_filename)
        else:
            dm.stream.write(
                "Searching for doxygen files in '{}'...".format(
                    code_dir_or_doxygen_filename, ), )
            with dm.stream.DoneManager(
                    done_suffix=lambda: "{} found".format(
                        inflect.no("file", len(doxygen_files)), ),
                    suffix="\n",
            ) as this_dm:
                for fullpath in FileSystem.WalkFiles(
                        code_dir_or_doxygen_filename,
                        include_file_extensions=[DOXYGEN_EXTENSION],
                        traverse_exclude_dir_names=FileSystem.
                        CODE_EXCLUDE_DIR_NAMES,
                ):
                    if not os.path.isfile(
                            "{}{}".format(
                                os.path.splitext(fullpath)[0],
                                DOXYGEN_EXTENSION_IGNORE,
                            ), ):
                        doxygen_files.append(fullpath)

            if not doxygen_files:
                return dm.result

        # Process the files

        # ----------------------------------------------------------------------
        class GetDoxygenValueError(KeyError):
            """Exception raised when a doxygen tag is not found"""

            pass

        # ----------------------------------------------------------------------
        def GetDoxygenValue(tag, content):
            match = re.search(
                r"{}[ \t]*=[ \t]*(?P<value>.*?)\r?\n".format(re.escape(tag)),
                content,
                re.IGNORECASE,
            )

            if not match:
                raise GetDoxygenValueError(
                    "Unable to find '{}' in the doxygen configuration file".
                    format(tag), )

            return match.group("value")

        # ----------------------------------------------------------------------

        results = OrderedDict()

        dm.stream.write(
            "Processing {}...".format(
                inflect.no("doxygen file", len(doxygen_files))), )
        with dm.stream.DoneManager(suffix="\n", ) as doxygen_dm:
            for index, doxygen_file in enumerate(doxygen_files):
                doxygen_dm.stream.write(
                    "Processing '{}' ({} of {})...".format(
                        doxygen_file,
                        index + 1,
                        len(doxygen_files),
                    ), )
                with doxygen_dm.stream.DoneManager() as this_dm:
                    prev_dir = os.getcwd()

                    os.chdir(os.path.dirname(doxygen_file))
                    with CallOnExit(lambda: os.chdir(prev_dir)):
                        # Execute
                        this_dm.result = Process.Execute(
                            'dot -c && doxygen "{}"'.format(doxygen_file),
                            StreamDecorator(
                                this_dm.stream if verbose else None),
                        )

                        if this_dm.result != 0:
                            continue

                        # Extract data from the doxygen file
                        with open(doxygen_file) as f:
                            content = f.read()

                        project_name = GetDoxygenValue("PROJECT_NAME", content)

                        # Older doxygen files don't have a PROJECT_VERSION
                        try:
                            project_version = GetDoxygenValue(
                                "PROJECT_VERSION", content)
                        except GetDoxygenValueError:
                            project_version = GetDoxygenValue(
                                "PROJECT_NUMBER", content)

                        output_directory = GetDoxygenValue(
                            "OUTPUT_DIRECTORY", content)

                        source_dir = os.path.dirname(doxygen_file)
                        if output_directory:
                            output_directory = os.path.join(
                                source_dir, output_directory)

                        dest_dir = os.path.join(output_dir, project_name)
                        if project_version:
                            dest_dir = os.path.join(dest_dir, project_version)

                        dest_dir = dest_dir.replace('"', "").strip()
                        FileSystem.MakeDirs(dest_dir)

                        for content_type in [
                                "html",
                                "Latex",
                                "RTF",
                                "man",
                                "XML",
                        ]:
                            value = GetDoxygenValue(
                                "GENERATE_{}".format(content_type),
                                content,
                            )
                            if not value or value.lower() != "yes":
                                continue

                            output_name = GetDoxygenValue(
                                "{}_OUTPUT".format(content_type),
                                content,
                            )

                            source_fullpath = os.path.join(
                                source_dir, output_name)
                            dest_fullpath = os.path.join(dest_dir, output_name)

                            if not os.path.isdir(source_fullpath):
                                this_dm.stream.write(
                                    "ERROR: The directory '{}' does not exist.\n"
                                    .format(source_fullpath, ), )
                                this_dm.result = -1
                                continue

                            FileSystem.RemoveTree(dest_fullpath)
                            shutil.move(source_fullpath, dest_fullpath)

                            results.setdefault(
                                doxygen_file,
                                OrderedDict())[content_type] = dest_fullpath

                        # Tagfile
                        value = GetDoxygenValue("GENERATE_TAGFILE", content)
                        if value:
                            source_fullpath = os.path.join(source_dir, value)
                            dest_fullpath = os.path.join(dest_dir, value)

                            if not os.path.isfile(source_fullpath):
                                this_dm.stream.write(
                                    "ERROR: The filename '{}' does not exist.\n"
                                    .format(source_fullpath, ), )
                                this_dm.result = -1
                                continue

                            FileSystem.RemoveFile(dest_fullpath)
                            shutil.move(source_fullpath, dest_fullpath)

                            results.setdefault(
                                doxygen_file,
                                OrderedDict())["tagfile"] = dest_fullpath

        # Generate the json file
        output_filename = os.path.join(
            output_dir,
            "{}.json".format(os.path.splitext(_script_name)[0]),
        )

        dm.stream.write("Writing '{}'...".format(output_filename))
        with dm.stream.DoneManager() as this_dm:
            with open(output_filename, "w") as f:
                json.dump(results, f)

        return dm.result
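
# ----------------------------------------------------------------------
# Illustrative sketch (not part of the original example) of the tag extraction performed
# by GetDoxygenValue above; it assumes 're' is imported as in the example. A doxygen
# configuration line such as "PROJECT_NAME = Foo" yields the text to the right of '='.
def _ExampleGetDoxygenTag(tag, content):
    match = re.search(
        r"{}[ \t]*=[ \t]*(?P<value>.*?)\r?\n".format(re.escape(tag)),
        content,
        re.IGNORECASE,
    )

    return match.group("value") if match else None

# _ExampleGetDoxygenTag("PROJECT_NAME", "PROJECT_NAME = Foo\n")   -> "Foo"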
Example #11
    def Create(cls, settings):
        # ----------------------------------------------------------------------
        def GetItems(directory, ignore_set):
            items = []

            assert os.path.isdir(directory), directory
            is_bin_dir = PythonActivationActivity.BinSubdirs and directory.endswith(os.path.join(*PythonActivationActivity.BinSubdirs))
            
            for item in os.listdir(directory):
                if item in ignore_set:
                    continue

                fullpath = os.path.join(directory, item)
                if not CurrentShell.IsSymLink(fullpath):
                    if ( CurrentShell.CategoryName == "Linux" and 
                         is_bin_dir and 
                         item.startswith("python")
                       ):
                        continue

                    items.append(fullpath)

            return items

        # ----------------------------------------------------------------------

        # Get the libraries
        new_libraries = GetItems(settings.LibraryDir, set([ "__pycache__", EASY_INSTALL_PTH_FILENAME, ]))

        # Ignore .pyc files (which may be here for python 2.7)
        new_libraries = [ item for item in new_libraries if not os.path.splitext(item)[1] == ".pyc" ]

        # Get os-specific library extensions 
        os_specific_extensions = [ ".pyd", ".so", CurrentShell.ScriptExtension, ]
        if CurrentShell.ExecutableExtension:
            os_specific_extensions.append(CurrentShell.ExecutableExtension)

        new_library_extensions = []

        for new_library in new_libraries:
            if not os.path.isdir(new_library):
                continue

            new_library_extensions += FileSystem.WalkFiles( new_library,
                                                            include_file_extensions=os_specific_extensions,
                                                          )

        script_ignore_items = set([ "__pycache__", WRAPPERS_FILENAME, ])

        # Read the scripts' wrappers file to get a list of all the script files that
        # should be ignored.
        potential_filename = os.path.join(settings.ScriptDir, WRAPPERS_FILENAME)
        if os.path.isfile(potential_filename):
            for name in [ line.strip() for line in open(potential_filename).readlines() if line.strip() ]:
                script_ignore_items.add(name)

        # Get the scripts
        new_scripts = GetItems(settings.ScriptDir, script_ignore_items)

        # Get os-specific script extensions
        new_script_extensions = [ item for item in new_scripts if os.path.splitext(item)[1] in os_specific_extensions ]

        return cls( new_libraries,
                    new_library_extensions,
                    new_scripts,
                    new_script_extensions,
                  )
Example #12
def CreatePluginMap(
    dynamic_plugin_architecture_environment_key,
    plugins_dir,
    output_stream,
):
    """
    Loads all plugins specified by the given environment key or that
    reside in the provided plugins_dir.

    Returns dict: { "<plugin name>" : PluginInfo(), ... }
    """

    plugin_mods = []

    if os.getenv(dynamic_plugin_architecture_environment_key):
        assert os.getenv("DEVELOPMENT_ENVIRONMENT_FUNDAMENTAL")

        sys.path.insert(0, os.getenv("DEVELOPMENT_ENVIRONMENT_FUNDAMENTAL"))
        with CallOnExit(lambda: sys.path.pop(0)):
            from RepositoryBootstrap.SetupAndActivate import DynamicPluginArchitecture

        plugin_mods = list(
            DynamicPluginArchitecture.EnumeratePlugins(
                dynamic_plugin_architecture_environment_key))

    elif os.path.isdir(plugins_dir):
        for filename in FileSystem.WalkFiles(
                plugins_dir,
                include_file_base_names=[
                    lambda name: name.endswith("Plugin"),
                ],
                include_file_extensions=[
                    ".py",
                ],
                recurse=True,
        ):
            name = os.path.splitext(os.path.basename(filename))[0]
            plugin_mods.append(imp.load_source(name, filename))

    else:
        raise Exception(
            "'{}' is not a valid environment variable and '{}' is not a valid directory"
            .format(
                dynamic_plugin_architecture_environment_key,
                plugins_dir,
            ))

    plugins = OrderedDict()

    # ----------------------------------------------------------------------
    def GetPluginImpl(plugin_name, raise_on_error=True):
        if plugin_name not in plugins:
            if raise_on_error:
                raise Exception(
                    "'{}' is not a valid plugin".format(plugin_name))

            return None

        return plugins[plugin_name]

    # ----------------------------------------------------------------------

    warning_stream = StreamDecorator(
        output_stream,
        line_prefix="WARNING: ",
        suffix='\n',
    )

    with CallOnExit(lambda: warning_stream.flush(force_suffix=True)):
        if not plugin_mods:
            warning_stream.write("No plugins were found.\n")

        for plugin_mod in plugin_mods:
            obj = getattr(plugin_mod, "Plugin", None)
            if obj is None:
                warning_stream.write(
                    "The module defined at '{}' does not contain a 'Plugin' class.\n"
                    .format(plugin_mod.__file__))
                continue

            # Dynamically add the method GetPlugin to the plugin object; this will allow
            # a plugin to query for other plugins. This is a bit wonky, but it works with
            # the plugin architecture where most of the plugins are static objects.
            obj.GetPlugin = staticmethod(GetPluginImpl)

            obj = obj()

            if not obj.IsValidEnvironment():
                warning_stream.write(
                    "The plugin '{}' is not valid within this environment ({}).\n"
                    .format(
                        obj.Name,
                        plugin_mod.__file__,
                    ))
                continue

            plugins[obj.Name] = PluginInfo(plugin_mod, obj)

    return plugins
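
# ----------------------------------------------------------------------
# Illustrative usage sketch (not part of the original example): consuming the map
# returned by CreatePluginMap. The environment key and plugins directory below are
# hypothetical. Within a plugin, other plugins can be looked up through the injected
# hook (e.g. cls.GetPlugin("<plugin name>")), as described in the comment above.
def _ExampleLoadPlugins(output_stream=sys.stdout):
    plugins = CreatePluginMap(
        "HYPOTHETICAL_PLUGIN_ENVIRONMENT_KEY",
        os.path.join(os.getcwd(), "Plugins"),
        output_stream,
    )

    for plugin_name in plugins:
        output_stream.write("Loaded plugin: {}\n".format(plugin_name))

    return plugins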
Example #13
    def GenerateContextItems(cls, inputs, **kwargs):
        """
        Yields one or more context items from the given input.

        Context objects are arbitrary python objects used to define state/context
        about the invocation of the compiler. This information is used to specify 
        input and determine if the compiler should be invoked. 
        
        This context object must support pickling.
        """

        if not isinstance(inputs, list):
            inputs = [
                inputs,
            ]

        # Inputs may be grouped or produce a number of invocation groups.
        invocation_group_inputs = []

        for input in inputs:
            if os.path.isfile(input):
                if isinstance(cls.InputTypeInfo, FilenameTypeInfo):
                    result = cls.InputTypeInfo.ValidateItem(
                        input
                    )  # <Class '<name>' has no '<attr>' member> pylint: disable = E1101
                    if result is not None:
                        raise cls.DiagnosticException(result)

                    potential_inputs = [
                        input,
                    ]
                elif isinstance(cls.InputTypeInfo, DirectoryTypeInfo):
                    raise cls.DiagnosticException(
                        "The filename '{}' was provided as input, but this object operates on directories."
                        .format(input))
                else:
                    assert False, (cls.Name, cls.InputTypeInfo)

            elif os.path.isdir(input):
                if isinstance(cls.InputTypeInfo, FilenameTypeInfo):
                    potential_inputs = [
                        filename for filename in FileSystem.WalkFiles(input)
                        if cls.InputTypeInfo.IsValid(filename)
                    ]  # <Class '<name>' has no '<attr>' member> pylint: disable = E1101
                elif isinstance(cls.InputTypeInfo, DirectoryTypeInfo):
                    result = cls.InputTypeInfo.ValidateItem(
                        input
                    )  # <Class '<name>' has no '<attr>' member> pylint: disable = E1101
                    if result is not None:
                        raise cls.DiagnosticException(result)

                    potential_inputs = [
                        input,
                    ]
                else:
                    assert False, (cls.Name, cls.InputTypeInfo)

            else:
                raise Exception(
                    "The input '{}' is not a valid filename or directory".
                    format(input))

            invocation_group_inputs += [
                potential_input for potential_input in potential_inputs
                if cls.IsSupportedContent(potential_input)
            ]

        # Populate default metadata
        optional_metadata = cls._GetOptionalMetadata()

        if optional_metadata is None:
            # ----------------------------------------------------------------------
            def MetadataGenerator():
                # <Using a conditional statement with a constant value> pylint: disable = W0125
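                # Note: the unreachable 'yield' below makes MetadataGenerator a generator
                # function that produces no items (an empty iterator).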
                if False:
                    yield

            # ----------------------------------------------------------------------

        elif isinstance(optional_metadata, dict):
            # ----------------------------------------------------------------------
            def MetadataGenerator():
                for kvp in six.iteritems(optional_metadata):
                    yield kvp

            # ----------------------------------------------------------------------

        elif isinstance(optional_metadata, list):
            # ----------------------------------------------------------------------
            def MetadataGenerator():
                for t in optional_metadata:
                    yield t

            # ----------------------------------------------------------------------

        else:
            assert False, type(optional_metadata)

        for k, v in MetadataGenerator():
            if k not in kwargs or kwargs[k] is None or kwargs[k] == '':
                kwargs[k] = v

        for metadata in cls._GenerateMetadataItems(invocation_group_inputs,
                                                   copy.deepcopy(kwargs)):
            for required_name in cls._GetRequiredMetadataNames():
                if required_name not in metadata:
                    raise Exception(
                        "'{}' is required metadata".format(required_name))

            metadata = copy.deepcopy(metadata)

            if not cls.GetInputItems(metadata):
                continue

            display_name = cls._GetDisplayName(metadata)
            if display_name:
                metadata["display_name"] = display_name

            context = cls._CreateContext(metadata)
            if not context:
                continue

            for required_name in cls._GetRequiredContextNames():
                if required_name not in context:
                    raise Exception("'{}' is required for {} ({})".format(
                        required_name,
                        cls.Name,
                        ', '.join([
                            "'{}'".format(input)
                            for input in cls.GetInputItems(context)
                        ]),
                    ))

            yield context
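
# ----------------------------------------------------------------------
# Illustrative sketch (not part of the original example): contexts yielded by
# GenerateContextItems are plain python objects that must support pickling, so they can
# be serialized for later or out-of-process invocation. 'compiler_cls' stands in for any
# class that implements the method above; it and the arguments are hypothetical.
def _ExampleSerializeContexts(compiler_cls, inputs, **kwargs):
    import pickle

    return [
        pickle.dumps(context)
        for context in compiler_cls.GenerateContextItems(inputs, **kwargs)
    ]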
Example #14
def Generate(
    plugin,
    output_name,
    output_dir,
    input,
    include_regex=None,
    exclude_regex=None,
    output_data_filename_prefix=None,
    plugin_arg=None,
    force=False,
    output_stream=sys.stdout,
    verbose=False,
):
    """Generates content for the given input using the named plugin"""

    plugin_args = plugin_arg
    del plugin_arg

    include_regexes = include_regex
    del include_regex

    exclude_regexes = exclude_regex
    del exclude_regex

    with StreamDecorator(output_stream).DoneManager(
            line_prefix="",
            prefix="\nResults: ",
            suffix="\n",
    ) as dm:
        code_generator = CreateCodeGenerator(PLUGINS[plugin])

        # Get the inputs
        inputs = []

        dm.stream.write("Analyzing inputs...")
        with dm.stream.DoneManager(suffix="\n", ) as this_dm:
            for i in input:
                if os.path.isfile(i):
                    try:
                        code_generator.InputTypeInfo.ValidateItem(i)
                        inputs.append(i)
                    except Exception as ex:
                        this_dm.stream.write("ERROR: {}\n".format(str(ex)))
                        this_dm.result = -1

                elif os.path.isdir(i):
                    len_inputs = len(inputs)

                    for filename in FileSystem.WalkFiles(i):
                        if code_generator.InputTypeInfo.IsValid(filename):
                            inputs.append(filename)

                    if len(inputs) == len_inputs:
                        this_dm.stream.write(
                            "WARNING: No valid input files were found in '{}'\n"
                            .format(i))
                        if this_dm.result >= 0:
                            this_dm.result = 1

                else:
                    assert False, i

            if this_dm.result < 0:
                return this_dm.result

        # Invoke the code generator
        dm.stream.write("Generating code...")
        with dm.stream.DoneManager() as this_dm:
            this_dm.result = CommandLineGenerate(
                code_generator,
                inputs,
                this_dm.stream,
                verbose,
                plugin_name=plugin,
                output_name=output_name,
                output_dir=output_dir,
                plugin_settings=plugin_args,
                force=force,
                output_data_filename_prefix=output_data_filename_prefix,
                include_regexes=include_regexes,
                exclude_regexes=exclude_regexes,
            )
            if this_dm.result < 0:
                return this_dm.result

        return dm.result
def Html(
    bin_dir=None,
    profraw_filename="default.profraw",
    profdata_filename="default.profdata",
    executable=None,
    source_dir=None,
    output_filename="code_coverage.html",
    force=False,
    no_sparse=False,
    output_stream=sys.stdout,
    verbose=False,
):
    """Generates a HTML file based on *.profdata files"""

    executables = executable
    del executable

    source_dirs = source_dir
    del source_dir

    if bin_dir is None:
        bin_dir = os.getcwd()

    with StreamDecorator(output_stream).DoneManager(
        line_prefix="",
        prefix="\nResults: ",
        suffix="\n",
    ) as dm:
        # Generate the profdata file (if necessary)
        profdata_filename = os.path.join(bin_dir, profdata_filename)

        if force or not os.path.isfile(profdata_filename):
            profraw_filename = os.path.join(bin_dir, profraw_filename)
            if not os.path.isfile(profraw_filename):
                raise CommandLine.UsageException(
                    "'{}' does not exist.".format(profraw_filename),
                )

            dm.stream.write("Creating '{}'...".format(profdata_filename))
            with dm.stream.DoneManager(
                suffix="\n",
            ) as this_dm:
                FileSystem.MakeDirs(os.path.dirname(profdata_filename))

                command_line = 'llvm-profdata merge {sparse} -o "{output_filename}" "{input_filename}"'.format(
                    sparse="" if no_sparse else "-sparse",
                    output_filename=profdata_filename,
                    input_filename=profraw_filename,
                )

                if verbose:
                    this_dm.stream.write(
                        textwrap.dedent(
                            """\
                            Command Line:
                                {}

                            """,
                        ).format(command_line),
                    )

                this_dm.result = Process.Execute(command_line, this_dm.stream)
                if this_dm.result != 0:
                    return this_dm.result

        # Generate the html
        output_filename = os.path.join(bin_dir, output_filename)

        dm.stream.write("Creating '{}'...".format(output_filename))
        with dm.stream.DoneManager(
            suffix="\n",
        ) as this_dm:
            if not executables:
                this_dm.stream.write("Finding executables...")
                with this_dm.stream.DoneManager(
                    done_suffix=lambda: "{} found".format(
                        inflect.no("executable", len(executables)),
                    ),
                ) as find_dm:
                    if CurrentShell.ExecutableExtension:
                        executables = list(
                            FileSystem.WalkFiles(
                                bin_dir,
                                include_file_extensions=[
                                    CurrentShell.ExecutableExtension
                                ],
                                recurse=False,
                            ),
                        )
                    else:
                        # 'executables' may be None at this point; start with an empty list
                        executables = []

                        for filename in FileSystem.WalkFiles(
                            bin_dir,
                            recurse=False,
                        ):
                            if os.access(filename, os.X_OK):
                                executables.append(filename)

            FileSystem.MakeDirs(os.path.dirname(output_filename))

            command_line = 'llvm-cov show {executables} "-instr-profile={profdata}" -use-color --format html {sources} > "{output_filename}"'.format(
                executables=" ".join(
                    ['"{}"'.format(executable) for executable in executables],
                ),
                profdata=profdata_filename,
                sources=" ".join(
                    ['"{}"'.format(source_dir) for source_dir in source_dirs],
                ) if source_dirs else "",
                output_filename=output_filename,
            )

            if verbose:
                this_dm.stream.write(
                    textwrap.dedent(
                        """\
                        Command Line:
                            {}

                        """,
                    ).format(command_line),
                )

            this_dm.result = Process.Execute(command_line, this_dm.stream)
            if this_dm.result != 0:
                return this_dm.result

        return dm.result
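
# ----------------------------------------------------------------------
# Illustrative sketch (not part of the original example) of the two-step flow driven by
# Html above: raw '.profraw' profiles are merged into a '.profdata' file, which llvm-cov
# then renders as HTML for an instrumented executable. The command lines mirror the ones
# constructed above; the paths are hypothetical.
def _ExampleCoverageCommandLines(bin_dir, executable):
    merge = 'llvm-profdata merge -sparse -o "{bin}/default.profdata" "{bin}/default.profraw"'.format(
        bin=bin_dir,
    )
    show = 'llvm-cov show "{exe}" "-instr-profile={bin}/default.profdata" -use-color --format html > "{bin}/code_coverage.html"'.format(
        exe=executable,
        bin=bin_dir,
    )

    return [merge, show]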
Example #16
def EntryPoint(
    delete_before=datetime.timedelta(days=7),
    yes=False,
    output_stream=sys.stdout,
    verbose=False,
):
    with StreamDecorator(output_stream).DoneManager(
            line_prefix='',
            prefix="\nResults: ",
            suffix='\n',
    ) as dm:
        verbose_stream = StreamDecorator(dm.stream if verbose else None,
                                         line_prefix="INFO: ")

        # Find the files

        # ----------------------------------------------------------------------
        FileInfo = namedtuple(
            "FileInfo",
            [
                "Name",
                "Type",
                "Fullpath",
                "Age",
                "Size",
            ],
        )

        # ----------------------------------------------------------------------

        t = time.time()

        dm.stream.write("Searching for files...")
        with dm.stream.DoneManager(suffix='\n', ):
            file_infos = []

            for filename in FileSystem.WalkFiles(
                    CurrentShell.TempDirectory,
                    include_file_extensions=[
                        RepositoryBootstrapConstants.TEMPORARY_FILE_EXTENSION,
                    ],
            ):
                name = os.path.splitext(
                    os.path.basename(filename))[0].split('.')

                if len(name) == 1:
                    type_ = ''
                    name = name[0]
                else:
                    type_ = name[-1]
                    name = '.'.join(name[:-1])

                file_infos.append(
                    FileInfo(
                        name,
                        type_,
                        filename,
                        datetime.timedelta(seconds=t -
                                           os.stat(filename)[stat.ST_MTIME]),
                        os.stat(filename)[stat.ST_SIZE],
                    ))

        if not file_infos:
            dm.stream.write("No files were found.\n")
            return dm.result

        dm.stream.write("{} {} found.\n".format(
            inflect.no("file", len(file_infos)),
            inflect.plural("was", len(file_infos)),
        ))

        verbose_stream.write("\nFiles found:\n{}\n\n".format('\n'.join(
            [fi.Fullpath for fi in file_infos])))

        # Trim the list based on age
        file_infos = [fi for fi in file_infos if fi.Age >= delete_before]

        if not file_infos:
            dm.stream.write(
                "No files were found older than {}.\n".format(delete_before))
            return dm.result

        if not yes:
            total_size = 0
            for fi in file_infos:
                total_size += fi.Size

            dm.stream.write(
                textwrap.dedent("""\

                Would you like to delete these files:

                    Name                        Type                Size               Age (days)                      Fullpath
                    --------------------------  ------------------  -----------------  ------------------------------  -----------------------------------------------
                {files}

                ? ({total_size}) [y/N] """).format(
                    files='\n'.join([
                        "    {name:<26}  {type:18}  {size:<17}  {age:<30}  {fullpath}"
                        .format(
                            name=fi.Name,
                            type=fi.Type,
                            size=FileSystem.GetSizeDisplay(fi.Size),
                            age=str(fi.Age),
                            fullpath=fi.Fullpath,
                        ) for fi in file_infos
                    ]),
                    total_size=FileSystem.GetSizeDisplay(total_size),
                ))

            value = six.moves.input().strip()
            if not value:
                value = 'N'

            value = value.lower()

            if value in [
                    "0",
                    "n",
                    "no",
            ]:
                return dm.result

        dm.stream.write("\nRemoving files...")
        with dm.stream.DoneManager() as this_dm:
            for index, fi in enumerate(file_infos):
                this_dm.stream.write("Removing '{}' ({} of {})...".format(
                    fi.Fullpath,
                    index + 1,
                    len(file_infos),
                ))
                with this_dm.stream.DoneManager():
                    FileSystem.RemoveFile(fi.Fullpath)

        return dm.result