def _CreateContext(cls, metadata):
        """Resolve 'output_filename' to an absolute path, ensure its parent
        directory exists on disk, and delegate context creation to the base
        class.
        """
        resolved_filename = os.path.realpath(metadata["output_filename"])
        metadata["output_filename"] = resolved_filename

        # The file itself is created later; only its directory is needed now.
        FileSystem.MakeDirs(os.path.dirname(resolved_filename))

        return super(SingleOutputMixin, cls)._CreateContext(metadata)
Example #2
0
    def _CreateContext(cls, metadata):
        """Normalize 'output_dir' to an absolute path, create the directory,
        and forward context creation to the base class.
        """
        real_output_dir = os.path.realpath(metadata["output_dir"])
        metadata["output_dir"] = real_output_dir

        FileSystem.MakeDirs(real_output_dir)

        return super(ConditionalInvocationQueryMixin, cls)._CreateContext(metadata)
def Lcov(
    bin_dir=None,
    not_llvm=False,
    output_dir=None,
    output_filename="lcov.info",
    type=None,
    output_stream=sys.stdout,
    verbose=False,
):
    """Generates a LCOV file based on *.gcno files

    Args:
        bin_dir: Optional list of directories to scan for coverage data;
            defaults to the current working directory.
        not_llvm: Suppress the ' --llvm' flag passed to grcov.
        output_dir: Directory that receives 'output_filename'; required when
            more than one 'bin_dir' is provided, otherwise defaults to the
            single bin dir.
        output_filename: Name of the generated LCOV file.
        type: Optional grcov output type (forwarded via '-t').
        output_stream: Stream used for status output.
        verbose: Write the generated command line to the output stream.

    Returns:
        The process result code (0 on success).
    """

    bin_dirs = bin_dir
    del bin_dir

    if not bin_dirs:
        # BUG FIX: the original called 'bin_dirs.append(...)' here, which
        # raises AttributeError when 'bin_dir' is None (its default value).
        bin_dirs = [os.getcwd()]

    if len(bin_dirs) > 1 and not output_dir:
        raise CommandLine.UsageException(
            "An 'output_dir' must be provided when multiple 'bin_dirs' are parsed",
        )

    if len(bin_dirs) == 1 and not output_dir:
        output_dir = bin_dirs[0]

    with StreamDecorator(output_stream).DoneManager(
        line_prefix="",
        prefix="\nResults: ",
        suffix="\n",
    ) as dm:
        output_filename = os.path.join(output_dir, output_filename)

        dm.stream.write("Creating '{}'...".format(output_filename))
        with dm.stream.DoneManager() as this_dm:
            FileSystem.MakeDirs(output_dir)

            # Quote each directory so paths containing spaces survive the shell.
            command_line = 'grcov {dirs} -o "{output_filename}"{llvm}{type}'.format(
                dirs=" ".join(['"{}"'.format(d) for d in bin_dirs]),
                output_filename=output_filename,
                llvm="" if not_llvm else " --llvm",
                type="" if type is None else " -t {}".format(type),
            )

            if verbose:
                this_dm.stream.write(
                    textwrap.dedent(
                        """\
                        Command Line:
                            {}

                        """,
                    ).format(command_line),
                )

            this_dm.result = Process.Execute(command_line, this_dm.stream)
            if this_dm.result != 0:
                return this_dm.result

        return dm.result
    def _CreateContext(cls, metadata):
        """Convert every entry of 'output_filenames' to an absolute path and
        make sure each parent directory exists before deferring to the base
        class.
        """
        for index, output_filename in enumerate(metadata["output_filenames"]):
            fullpath = os.path.realpath(output_filename)

            FileSystem.MakeDirs(os.path.dirname(fullpath))
            metadata["output_filenames"][index] = fullpath

        return super(MultipleOutputMixin, cls)._CreateContext(metadata)
Example #5
0
    def Create(cls, output_dir):
        """Initialize a brand-new git repository at 'output_dir'.

        Raises if the directory already exists so an existing repository is
        never clobbered.
        """
        if os.path.isdir(output_dir):
            raise Exception(
                "The directory '{}' already exists and will not be overwritten"
                .format(output_dir))

        FileSystem.MakeDirs(output_dir)

        command = 'git init "{}"'.format(output_dir)
        return cls.Execute(os.getcwd(), command)
Example #6
0
            def Impl(source_dir_or_filename, dest_dir):
                """Move a file or an entire directory tree into 'dest_dir',
                overwriting any existing content at the destination.
                """
                if not os.path.isdir(source_dir_or_filename):
                    FileSystem.MakeDirs(dest_dir)
                    shutil.move(source_dir_or_filename, dest_dir)
                    return

                # shutil.move won't overwrite files, so use distutils (which will)
                # NOTE(review): distutils was removed in Python 3.12 — confirm
                # the supported interpreter range before relying on this.
                import distutils.dir_util

                destination = os.path.join(
                    dest_dir, os.path.basename(source_dir_or_filename))

                distutils.dir_util.copy_tree(source_dir_or_filename, destination)
                FileSystem.RemoveTree(source_dir_or_filename)
Example #7
0
    def Save(self):
        """Pickle this object, base64-encode it, and persist it to the
        filename derived from self.Context via self.TEMPLATE.
        """
        data = pickle.dumps(self)
        data = base64.b64encode(data)
        # NOTE(review): on Python 3 this produces the repr of the bytes
        # object (e.g. "b'...'"), not a plain base64 string — presumably the
        # TEMPLATE/loader round-trips that exact representation; confirm
        # before changing this to data.decode().
        data = str(data)

        filename = self._GetPersistedFilename(self.Context)

        # Ensure the destination directory exists before writing.
        FileSystem.MakeDirs(os.path.dirname(filename))

        with open(filename, 'w') as f:
            f.write(self.TEMPLATE.format(data=data))
Example #8
0
    def Clone(cls, uri, output_dir, branch=None):
        """Clone the repository at 'uri' into 'output_dir' (optionally
        checking out a specific 'branch'), refusing to overwrite an existing
        directory.
        """
        if os.path.isdir(output_dir):
            raise Exception(
                "The directory '{}' already exists and will not be overwritten."
                .format(output_dir))

        clone_path, clone_name = os.path.split(output_dir)

        # The parent must exist; git creates the leaf directory itself.
        FileSystem.MakeDirs(clone_path)

        branch_arg = ' --branch "{}"'.format(branch) if branch else ''
        command = 'git clone{branch} "{uri}" "{name}"'.format(
            branch=branch_arg,
            uri=uri,
            name=clone_name,
        )

        return cls.Execute(clone_path, command)
Example #9
0
def EntryPoint(
    zipped_input_filename,
    output_stream=sys.stdout,
):
    """Generates JSON files based on data previously pickled"""

    with StreamDecorator(output_stream).DoneManager(
            line_prefix="",
            prefix="\nResults: ",
            suffix="\n",
    ) as dm:
        output_dir = os.path.join(_script_dir, "..", "GeneratedCode")

        # Start from a clean output directory.
        FileSystem.RemoveTree(output_dir)
        FileSystem.MakeDirs(output_dir)

        df = _holiday_data_loader(zipped_input_filename)

        # One JSON file per country/region present in the data.
        country_names = set(df['countryOrRegion'])

        for country_name in country_names:
            country_df = df.loc[df['countryOrRegion'] == country_name]

            out = {
                "Date": [int(x.timestamp()) for x in list(country_df['date'])],
                "Holiday": list(country_df['normalizeHolidayName']),
            }

            json_path = os.path.join(output_dir, "{}.json".format(country_name))
            with open(json_path, 'w') as f:
                json.dump(out, f)

        return dm.result
def Html(
    bin_dir=None,
    profraw_filename="default.profraw",
    profdata_filename="default.profdata",
    executable=None,
    source_dir=None,
    output_filename="code_coverage.html",
    force=False,
    no_sparse=False,
    output_stream=sys.stdout,
    verbose=False,
):
    """Generates a HTML file based on *.profdata files

    Args:
        bin_dir: Directory containing the profile data; defaults to cwd.
        profraw_filename: Raw profile produced by the instrumented binary.
        profdata_filename: Merged profile; created from the profraw file
            when missing (or when 'force' is set).
        executable: Optional list of executables passed to llvm-cov; when
            empty, executables are discovered in 'bin_dir'.
        source_dir: Optional list of source dirs used to restrict coverage.
        output_filename: Name of the generated HTML file (within 'bin_dir').
        force: Regenerate the profdata file even if it already exists.
        no_sparse: Do not pass '-sparse' to llvm-profdata.
        output_stream: Stream used for status output.
        verbose: Write the generated command lines to the output stream.

    Returns:
        The process result code (0 on success).
    """

    # BUG FIX: default to an empty list so the discovery loop below can
    # append to it; the original bound None and crashed with AttributeError
    # on 'executables.append(...)' when no executables were supplied.
    executables = list(executable) if executable else []
    del executable

    source_dirs = source_dir
    del source_dir

    if bin_dir is None:
        bin_dir = os.getcwd()

    with StreamDecorator(output_stream).DoneManager(
        line_prefix="",
        prefix="\nResults: ",
        suffix="\n",
    ) as dm:
        # Generate the profdata file (if necessary)
        profdata_filename = os.path.join(bin_dir, profdata_filename)

        if force or not os.path.isfile(profdata_filename):
            profraw_filename = os.path.join(bin_dir, profraw_filename)
            if not os.path.isfile(profraw_filename):
                raise CommandLine.UsageException(
                    "'{}' does not exist.".format(profraw_filename),
                )

            dm.stream.write("Creating '{}'...".format(profdata_filename))
            with dm.stream.DoneManager(
                suffix="\n",
            ) as this_dm:
                FileSystem.MakeDirs(os.path.dirname(profdata_filename))

                command_line = 'llvm-profdata merge {sparse} -o "{output_filename}" "{input_filename}"'.format(
                    sparse="" if no_sparse else "-sparse",
                    output_filename=profdata_filename,
                    input_filename=profraw_filename,
                )

                if verbose:
                    this_dm.stream.write(
                        textwrap.dedent(
                            """\
                            Command Line:
                                {}

                            """,
                        ).format(command_line),
                    )

                this_dm.result = Process.Execute(command_line, this_dm.stream)
                if this_dm.result != 0:
                    return this_dm.result

        # Generate the html
        output_filename = os.path.join(bin_dir, output_filename)

        dm.stream.write("Creating '{}'...".format(output_filename))
        with dm.stream.DoneManager(
            suffix="\n",
        ) as this_dm:
            if not executables:
                this_dm.stream.write("Finding executables...")
                with this_dm.stream.DoneManager(
                    done_suffix=lambda: "{} found".format(
                        inflect.no("executable", len(executables)),
                    ),
                ) as find_dm:
                    if CurrentShell.ExecutableExtension:
                        # Shells with a dedicated extension (e.g. '.exe'):
                        # filter by extension.
                        executables = list(
                            FileSystem.WalkFiles(
                                bin_dir,
                                include_file_extensions=[
                                    CurrentShell.ExecutableExtension
                                ],
                                recurse=False,
                            ),
                        )
                    else:
                        # Otherwise, fall back to the executable permission bit.
                        for filename in FileSystem.WalkFiles(
                            bin_dir,
                            recurse=False,
                        ):
                            if os.access(filename, os.X_OK):
                                executables.append(filename)

            FileSystem.MakeDirs(os.path.dirname(output_filename))

            command_line = 'llvm-cov show {executables} "-instr-profile={profdata}" -use-color --format html {sources} > "{output_filename}"'.format(
                executables=" ".join(
                    ['"{}"'.format(executable) for executable in executables],
                ),
                profdata=profdata_filename,
                sources=" ".join(
                    ['"{}"'.format(source_dir) for source_dir in source_dirs],
                ) if source_dirs else "",
                output_filename=output_filename,
            )

            if verbose:
                this_dm.stream.write(
                    textwrap.dedent(
                        """\
                        Command Line:
                            {}

                        """,
                    ).format(command_line),
                )

            this_dm.result = Process.Execute(command_line, this_dm.stream)
            if this_dm.result != 0:
                return this_dm.result

        return dm.result
Example #11
0
def Execute( root_dir,
             output_dir,
             mode=None,
             debug_only=False,
             release_only=False,
             output_stream=sys.stdout,
             verbose=False,
           ):
    """Recursively calls Build files with the desired mode(s)

    Args:
        root_dir: Directory searched recursively for Build files.
        output_dir: Root of the per-configuration build output directories.
        mode: Optional list of modes; defaults to ["clean", "build"].
        debug_only: Only process configurations containing "debug".
        release_only: Only process configurations containing "release".
        output_stream: Stream used for status output.
        verbose: Echo each build's output even on success.

    Returns:
        The aggregated result code (0 on success).
    """

    assert os.path.isdir(root_dir), root_dir
    assert output_dir
    modes = mode or [ "clean", "build", ]; del mode
    assert output_stream

    with StreamDecorator(output_stream).DoneManager( line_prefix='',
                                                     prefix="\nResults: ",
                                                     suffix='\n',
                                                   ) as dm:
        build_infos = _GetBuildInfos(root_dir, dm.stream)
        if not build_infos:
            return dm.result

        # Find all the build files that have configurations that we can process
        build_configurations = []

        dm.stream.write("Processing build files...")
        with dm.stream.DoneManager( done_suffix=lambda: "{} found".format(inflect.no("configuration", len(build_configurations))),
                                  ) as this_dm:
            # ----------------------------------------------------------------------
            def GetSupportedConfigurations(configurations):
                # If there is a configuration that indicates completeness, execute that
                # and skip everything else.
                if COMPLETE_CONFIGURATION_NAME in configurations:
                    yield COMPLETE_CONFIGURATION_NAME
                    return

                # BUG FIX: the original iterated 'build_configurations' (the
                # outer list of result tuples) instead of the 'configurations'
                # parameter; that would call '.lower()' on tuples and never
                # yield the build file's own configurations.
                for config in configurations:
                    config_lower = config.lower()

                    if ( (debug_only and "debug" in config_lower) or
                         (release_only and "release" in config_lower) or
                         (not debug_only and not release_only)
                       ):
                        yield config

            # ----------------------------------------------------------------------

            for build_info in build_infos:
                if not build_info.configuration.Configurations:
                    build_configurations.append(( build_info.filename,
                                                  build_info.configuration,
                                                  None,
                                                ))
                else:
                    for config in GetSupportedConfigurations(build_info.configuration.Configurations):
                        build_configurations.append(( build_info.filename,
                                                      build_info.configuration,
                                                      config,
                                                    ))

        if not build_configurations:
            return dm.result

        dm.stream.write('\n')

        for mode_index, mode in enumerate(modes):
            dm.stream.write("Invoking '{}' ({} of {})...".format( mode,
                                                                  mode_index + 1,
                                                                  len(modes),
                                                                ))
            with dm.stream.DoneManager() as mode_dm:
                for build_index, (build_filename, config, configuration) in enumerate(build_configurations):
                    mode_dm.stream.write("Processing '{}'{} ({} of {})...".format( build_filename,
                                                                                   " - '{}'".format(configuration) if configuration else '',
                                                                                   build_index + 1,
                                                                                   len(build_configurations),
                                                                                 ))
                    with mode_dm.stream.DoneManager() as build_dm:
                        build_output_dir = os.path.join(output_dir, config.SuggestedOutputDirLocation, configuration or "Build")
                        FileSystem.MakeDirs(build_output_dir)

                        command_line = 'python "{build_filename}" {mode}{configuration}{output_dir}' \
                                            .format( build_filename=build_filename,
                                                     mode=mode,
                                                     configuration=' "{}"'.format(configuration) if configuration else '',
                                                     output_dir=' "{}"'.format(build_output_dir) if config.RequiresOutputDir else '',
                                                   )

                        build_dm.result, output = Process.Execute(command_line)

                        # It is possible that the cleaning process deleted the output directory. Recreate it
                        # if necessary to store the log file.
                        FileSystem.MakeDirs(build_output_dir)

                        with open(os.path.join(build_output_dir, BUILD_LOG_TEMPLATE.format(mode=mode)), 'w') as f:
                            f.write(output)

                        if build_dm.result != 0:
                            build_dm.stream.write(output)
                        elif verbose:
                            build_dm.stream.write(StringHelpers.LeftJustify("INFO: {}".format(output), len("INFO: ")))

        return dm.result
def _GenerateGlobalKernels(
    open_file_func,
    output_dir,
    all_items,
    all_type_mappings,
    all_custom_struct_data,
    output_stream,
):
    """Writes 'cpu_featurizers_kernels.h' and 'cpu_featurizers_kernels.cc'
    into '<output_dir>/featurizers_ops', declaring and registering one ONNX
    kernel per featurizer transformer.

    Returns 0 on success.
    """
    output_dir = os.path.join(output_dir, "featurizers_ops")
    FileSystem.MakeDirs(output_dir)

    with open_file_func(os.path.join(output_dir, "cpu_featurizers_kernels.h"),
                        "w") as f:
        f.write(
            textwrap.dedent(
                """\
                // Copyright (c) Microsoft Corporation. All rights reserved.
                // Licensed under the MIT License.

                #pragma once

                #include "core/framework/op_kernel.h"
                #include "core/framework/kernel_registry.h"

                namespace onnxruntime {
                namespace featurizers {

                Status RegisterCpuMSFeaturizersKernels(KernelRegistry& kernel_registry);

                }  // namespace featurizers
                }  // namespace onnxruntime
                """, ), )

    # One kernel-class macro per featurizer group; the first item names the
    # group. (The original also unpacked the unused type mappings and ended
    # the loop with a dead 'continue' — both removed.)
    macros = [
        "ONNX_OPERATOR_KERNEL_CLASS_NAME(kCpuExecutionProvider, kMSFeaturizersDomain, 1, {})"
        .format(items[0].name.replace("Featurizer", "Transformer"))
        for items, _ in zip(all_items, all_type_mappings)
    ]

    with open_file_func(os.path.join(output_dir, "cpu_featurizers_kernels.cc"),
                        "w") as f:
        f.write(
            textwrap.dedent(
                """\
                // Copyright (c) Microsoft Corporation. All rights reserved.
                // Licensed under the MIT License.

                #include "featurizers_ops/cpu_featurizers_kernels.h"

                #include "core/graph/constants.h"
                #include "core/framework/data_types.h"

                namespace onnxruntime {{
                namespace featurizers {{

                // Forward declarations
                {kernel_classes}

                Status RegisterCpuMSFeaturizersKernels(KernelRegistry& kernel_registry) {{
                  static const BuildKernelCreateInfoFn function_table[] = {{
                    {kernel_statements}
                  }};

                  for (auto& function_table_entry : function_table) {{
                    ORT_RETURN_IF_ERROR(kernel_registry.Register(function_table_entry()));
                  }}

                  return Status::OK();
                }}

                }}  // namespace featurizers
                }}  // namespace onnxruntime
                """, ).format(
                    kernel_classes="\n".join(
                        ["class {};".format(macro) for macro in macros]),
                    kernel_statements=StringHelpers.LeftJustify(
                        ",\n".join([
                            "BuildKernelCreateInfo<{}>".format(macro)
                            for macro in macros
                        ], ),
                        4,
                    ),
                ), )

    return 0
Example #13
0
def EntryPoint(
    code_dir_or_doxygen_filename,
    output_dir,
    output_stream=sys.stdout,
    verbose=False,
):
    """Runs doxygen for one or more doxygen configuration files and moves
    the generated content into 'output_dir', then writes a JSON index of the
    results.

    Args:
        code_dir_or_doxygen_filename: Either a single doxygen file or a
            directory searched recursively for doxygen files.
        output_dir: Root directory receiving '<project>/<version>' output.
        output_stream: Stream used for status output.
        verbose: Echo doxygen's output while it runs.

    Returns:
        The aggregated result code (0 on success).
    """
    with StreamDecorator(output_stream).DoneManager(
            line_prefix="",
            prefix="\nResults: ",
            suffix="\n",
    ) as dm:
        # Get the doxygen files
        doxygen_files = []

        if os.path.isfile(code_dir_or_doxygen_filename):
            doxygen_files.append(code_dir_or_doxygen_filename)
        else:
            dm.stream.write(
                "Searching for doxygen files in '{}'...".format(
                    code_dir_or_doxygen_filename, ), )
            with dm.stream.DoneManager(
                    done_suffix=lambda: "{} found".format(
                        inflect.no("file", len(doxygen_files)), ),
                    suffix="\n",
            ) as this_dm:
                for fullpath in FileSystem.WalkFiles(
                        code_dir_or_doxygen_filename,
                        include_file_extensions=[DOXYGEN_EXTENSION],
                        traverse_exclude_dir_names=FileSystem.
                        CODE_EXCLUDE_DIR_NAMES,
                ):
                    # Skip files explicitly suppressed by an '...ignore'
                    # sibling file.
                    if not os.path.isfile(
                            "{}{}".format(
                                os.path.splitext(fullpath)[0],
                                DOXYGEN_EXTENSION_IGNORE,
                            ), ):
                        doxygen_files.append(fullpath)

            if not doxygen_files:
                return dm.result

        # Process the files

        # ----------------------------------------------------------------------
        class GetDoxygenValueError(KeyError):
            """Exception raised when a doxygen tag is not found"""

            pass

        # ----------------------------------------------------------------------
        def GetDoxygenValue(tag, content):
            """Return the value assigned to 'tag' within the doxygen file
            content; raises GetDoxygenValueError when the tag is absent.
            """
            match = re.search(
                r"{}[ \t]*=[ \t]*(?P<value>.*?)\r?\n".format(re.escape(tag)),
                content,
                re.IGNORECASE,
            )

            if not match:
                raise GetDoxygenValueError(
                    "Unable to find '{}' in the doxygen configuration file".
                    format(tag), )

            return match.group("value")

        # ----------------------------------------------------------------------

        results = OrderedDict()

        dm.stream.write(
            "Processing {}...".format(
                inflect.no("doxygen file", len(doxygen_files))), )
        with dm.stream.DoneManager(suffix="\n", ) as doxygen_dm:
            for index, doxygen_file in enumerate(doxygen_files):
                doxygen_dm.stream.write(
                    "Processing '{}' ({} of {})...".format(
                        doxygen_file,
                        index + 1,
                        len(doxygen_files),
                    ), )
                with doxygen_dm.stream.DoneManager() as this_dm:
                    prev_dir = os.getcwd()

                    # doxygen resolves relative paths against the cwd, so run
                    # from the configuration file's own directory.
                    os.chdir(os.path.dirname(doxygen_file))
                    with CallOnExit(lambda: os.chdir(prev_dir)):
                        # Execute
                        this_dm.result = Process.Execute(
                            'dot -c && doxygen "{}"'.format(doxygen_file),
                            StreamDecorator(
                                this_dm.stream if verbose else None),
                        )

                        if this_dm.result != 0:
                            continue

                        # Extract data from the doxygen file
                        with open(doxygen_file) as f:
                            content = f.read()

                        project_name = GetDoxygenValue("PROJECT_NAME", content)

                        # Older doxygen files don't have a PROJECT_VERSION
                        try:
                            project_version = GetDoxygenValue(
                                "PROJECT_VERSION", content)
                        except GetDoxygenValueError:
                            project_version = GetDoxygenValue(
                                "PROJECT_NUMBER", content)

                        output_directory = GetDoxygenValue(
                            "OUTPUT_DIRECTORY", content)

                        source_dir = os.path.dirname(doxygen_file)
                        if output_directory:
                            # BUG FIX: was 'os.pth.join' (AttributeError).
                            # NOTE(review): 'output_directory' is not used
                            # below — confirm whether it should feed into
                            # 'source_fullpath' resolution.
                            output_directory = os.path.join(
                                source_dir, output_directory)

                        dest_dir = os.path.join(output_dir, project_name)
                        if project_version:
                            dest_dir = os.path.join(dest_dir, project_version)

                        dest_dir = dest_dir.replace('"', "").strip()
                        FileSystem.MakeDirs(dest_dir)

                        # Move each enabled output format into dest_dir.
                        for content_type in [
                                "html",
                                "Latex",
                                "RTF",
                                "man",
                                "XML",
                        ]:
                            value = GetDoxygenValue(
                                "GENERATE_{}".format(content_type),
                                content,
                            )
                            if not value or value.lower() != "yes":
                                continue

                            output_name = GetDoxygenValue(
                                "{}_OUTPUT".format(content_type),
                                content,
                            )

                            source_fullpath = os.path.join(
                                source_dir, output_name)
                            dest_fullpath = os.path.join(dest_dir, output_name)

                            if not os.path.isdir(source_fullpath):
                                this_dm.stream.write(
                                    "ERROR: The directory '{}' does not exist.\n"
                                    .format(source_fullpath, ), )
                                this_dm.result = -1
                                continue

                            FileSystem.RemoveTree(dest_fullpath)
                            shutil.move(source_fullpath, dest_fullpath)

                            results.setdefault(
                                doxygen_file,
                                OrderedDict())[content_type] = dest_fullpath

                        # Tagfile
                        value = GetDoxygenValue("GENERATE_TAGFILE", content)
                        if value:
                            source_fullpath = os.path.join(source_dir, value)
                            dest_fullpath = os.path.join(dest_dir, value)

                            if not os.path.isfile(source_fullpath):
                                this_dm.stream.write(
                                    "ERROR: The filename '{}' does not exist.\n"
                                    .format(source_fullpath, ), )
                                this_dm.result = -1
                                continue

                            FileSystem.RemoveFile(dest_fullpath)
                            shutil.move(source_fullpath, dest_fullpath)

                            results.setdefault(
                                doxygen_file,
                                OrderedDict())["tagfile"] = dest_fullpath

        # Generate the json file
        output_filename = os.path.join(
            output_dir,
            "{}.json".format(os.path.splitext(_script_name)[0]),
        )

        dm.stream.write("Writing '{}'...".format(output_filename))
        with dm.stream.DoneManager() as this_dm:
            with open(output_filename, "w") as f:
                json.dump(results, f)

        return dm.result
Example #14
0
def EntryPoint(
    plugin,
    input_filename,
    output_dir,
    include=None,
    exclude=None,
    output_stream=sys.stdout,
):
    """Generates content based on a configuration file according to the specified plugin

    Args:
        plugin: key into the module-level PLUGINS map selecting the generator.
        input_filename: configuration file consumed via Serialization.Deserialize.
        output_dir: directory that receives generated content (created if needed).
        include: optional list of regular-expression strings; when provided, only
            featurizers whose names match at least one pattern are processed.
        exclude: optional list of regular-expression strings; featurizers whose
            names match any pattern are skipped.
        output_stream: stream used for progress/status output.

    Returns:
        0 on success; a non-zero result on failure.

    Raises:
        CommandLine.UsageException: if an include/exclude pattern is not a
            valid regular expression.
        Exception: on duplicate/invalid custom structs, enums, or types.
    """

    plugin = PLUGINS[plugin]

    # ----------------------------------------------------------------------
    def ToRegex(value):
        try:
            return re.compile(value)
        except re.error:
            raise CommandLine.UsageException(
                "'{}' is not a valid regular expression".format(value), )

    # ----------------------------------------------------------------------

    # BUG FIX: 'include' and 'exclude' default to None; iterating None raised
    # a TypeError when the parameters were omitted. Treat None as empty.
    includes = [ToRegex(arg) for arg in (include or [])]
    del include

    excludes = [ToRegex(arg) for arg in (exclude or [])]
    del exclude

    with StreamDecorator(output_stream).DoneManager(
            line_prefix="",
            prefix="\nResults: ",
            suffix="\n",
    ) as dm:
        dm.stream.write("Reading input data...")
        with dm.stream.DoneManager() as this_dm:
            try:
                data = Serialization.Deserialize(input_filename)
            except Exception as e:
                this_dm.stream.write(
                    textwrap.dedent(
                        """\
                        ERROR: {}
                               {}
                        """, ).format(
                            StringHelpers.LeftJustify(str(e), len("ERROR: ")),
                            str(getattr(e, "stack", None)),
                        ), )

                this_dm.result = -1
                return this_dm.result

        nonlocals = CommonEnvironment.Nonlocals(skipped=0, )

        dm.stream.write("Preprocessing data...")
        with dm.stream.DoneManager(
                done_suffix=lambda: "{} were skipped".format(
                    inflect.no("file", nonlocals.skipped), ),
                suffix=lambda: "\n" if nonlocals.skipped else None,
        ) as this_dm:
            # ----------------------------------------------------------------------
            def NormalizeEnum(enum):
                # Simplify the provided enum structure by creating an ordered
                # dictionary that maps enum names to integer values.
                if hasattr(enum, "integer_values"):
                    if len(enum.integer_values) != len(enum.values):
                        raise Exception(
                            "When integer values are specified for an enum, the number of integers must match the number of enums ('{}', '{}')"
                            .format(enum.values, enum.integer_values))

                    integer_values = enum.integer_values
                    del enum.integer_values
                else:
                    # No explicit integers: number sequentially from starting_index.
                    integer_values = list(
                        range(enum.starting_index,
                              enum.starting_index + len(enum.values)))

                del enum.starting_index

                assert len(
                    enum.values) == len(integer_values), (enum.values,
                                                          integer_values)
                enum.values = OrderedDict([
                    (k, v) for k, v in zip(enum.values, integer_values)
                ])

                return enum

            # ----------------------------------------------------------------------

            # Get the global custom structs
            global_custom_struct_names = set()
            global_custom_structs = []

            for item in data.custom_structs:
                if item.name in global_custom_struct_names:
                    raise Exception(
                        "The custom struct '{}' has already been defined".
                        format(item.name))

                global_custom_struct_names.add(item.name)
                global_custom_structs.append(item)

            # Get the global custom enums
            global_custom_enum_names = set()
            global_custom_enums = []

            for item in data.custom_enums:
                if item.name in global_custom_enum_names:
                    raise Exception(
                        "The custom enum '{}' has already been defined".format(
                            item.name))

                global_custom_enum_names.add(item.name)
                global_custom_enums.append(NormalizeEnum(item))

            # If there are templates at play, preprocess the content and expand the values
            new_data = []

            for item in data.featurizers:
                if item.status != "Available":
                    this_dm.stream.write(
                        "The status for '{}' is set to '{}' and will not be processed.\n"
                        .format(
                            item.name,
                            item.status,
                        ), )
                    nonlocals.skipped += 1

                    continue

                # Excludes win over includes; either filter increments 'skipped'.
                if excludes and any(
                        exclude.match(item.name) for exclude in excludes):
                    this_dm.stream.write(
                        "'{}' has been explicitly excluded.\n".format(
                            item.name), )
                    nonlocals.skipped += 1

                    continue

                if includes and not any(
                        include.match(item.name) for include in includes):
                    this_dm.stream.write(
                        "'{}' has not been included.\n".format(item.name), )
                    nonlocals.skipped += 1

                    continue

                for enum in getattr(item, "custom_enums", []):
                    NormalizeEnum(enum)

                if not hasattr(item, "templates"):
                    # No templates: emit one copy of the item per type mapping.
                    assert item.type_mappings

                    for mapping in item.type_mappings:
                        new_item = copy.deepcopy(item)

                        new_item.input_type = mapping.input_type
                        new_item.output_type = mapping.output_type

                        new_data.append([new_item])

                    continue

                # Templates: substitute each template type into every type
                # reference (configuration params, struct members, enum
                # underlying types, and the input/output mappings).
                new_data_items = []
                for template in item.templates:
                    regex = re.compile(r"\b{}\b".format(template.name))

                    for template_type in template.types:
                        new_item = copy.deepcopy(item)
                        new_item.template = template_type

                        # Remove the template mapping and list of templates
                        del new_item.templates
                        del new_item.type_mappings

                        for configuration_param in getattr(
                                new_item,
                                "configuration_params",
                            [],
                        ):
                            configuration_param.type = regex.sub(
                                template_type,
                                configuration_param.type,
                            )

                        for custom_struct in getattr(new_item,
                                                     "custom_structs", []):
                            if any(gcs for gcs in global_custom_structs
                                   if gcs.name == custom_struct.name):
                                raise Exception(
                                    "The custom structure '{}' in '{}' has already been defined as a global custom struct.\n"
                                    .format(custom_struct.name, item.name))

                            for member in custom_struct.members:
                                member.type = regex.sub(
                                    template_type, member.type)

                        for custom_enum in getattr(new_item, "custom_enums",
                                                   []):
                            if any(gce for gce in global_custom_enums
                                   if gce.name == custom_enum.name):
                                raise Exception(
                                    "The custom enum '{}' in '{}' has already been defined as a global custom enum.\n"
                                    .format(custom_enum.name, item.name))

                            custom_enum.underlying_type = regex.sub(
                                template_type, custom_enum.underlying_type)

                        for mapping in item.type_mappings:
                            # TODO: sub all types (for example: map<K, V>
                            if not regex.search(
                                    mapping.input_type) and not regex.search(
                                        mapping.output_type):
                                continue

                            new_item.input_type = regex.sub(
                                template_type, mapping.input_type)
                            if new_item.input_type != mapping.input_type:
                                new_item.input_type_template_mapping = OrderedDict(
                                    [
                                        (template_type, template.name),
                                    ], )

                            new_item.output_type = regex.sub(
                                template_type, mapping.output_type)
                            if new_item.output_type != mapping.output_type:
                                new_item.output_type_template_mapping = OrderedDict(
                                    [
                                        (template_type, template.name),
                                    ], )

                            # This will end up copying one more time than needed, but I couldn't think of a better way for now.
                            new_data_items.append(copy.deepcopy(new_item))

                new_data.append(new_data_items)

            data = new_data

        # Validate parameters
        dm.stream.write("Validating types...")
        with dm.stream.DoneManager():
            for items in data:
                for item in items:
                    # ----------------------------------------------------------------------
                    def IsSupportedType(typename):
                        # SUPPORTED_TYPES may contain compiled regexes (which
                        # have 'match') or plain type-name strings.
                        for potential_type in SUPPORTED_TYPES:
                            if hasattr(potential_type, "match"):
                                if potential_type.match(typename):
                                    return True

                            elif typename == potential_type:
                                return True

                        return False

                    # ----------------------------------------------------------------------
                    def IsCustomStructType(typename):
                        return any(custom_struct
                                   for custom_struct in itertools.chain(
                                       getattr(item, "custom_structs", []),
                                       global_custom_structs)
                                   if custom_struct.name == typename)

                    # ----------------------------------------------------------------------
                    def IsCustomEnumType(typename):
                        return any(custom_enum
                                   for custom_enum in itertools.chain(
                                       getattr(item, "custom_enums", []),
                                       global_custom_enums)
                                   if custom_enum.name == typename)

                    # ----------------------------------------------------------------------

                    # A trailing '?' marks an optional type; strip it before validation.
                    input_type = item.input_type
                    if input_type.endswith("?"):
                        input_type = input_type[:-1]

                    if (not IsSupportedType(input_type)
                            and not IsCustomStructType(input_type)
                            and not IsCustomEnumType(input_type)):
                        raise Exception(
                            "The input type '{}' defined in '{}' is not valid."
                            .format(
                                input_type,
                                item.name,
                            ), ) from None

                    output_type = item.output_type
                    if output_type.endswith("?"):
                        output_type = output_type[:-1]

                    if (not IsSupportedType(output_type)
                            and not IsCustomStructType(output_type)
                            and not IsCustomEnumType(output_type)):
                        raise Exception(
                            "The output type '{}' defined in '{}' is not valid."
                            .format(
                                output_type,
                                item.name,
                            ), ) from None

        dm.stream.write("Generating content...")
        with dm.stream.DoneManager() as this_dm:
            FileSystem.MakeDirs(output_dir)

            # ----------------------------------------------------------------------
            def CalcHash(filename):
                # SHA-256 of the file's contents, read in 4 KB blocks.
                hash = hashlib.sha256()

                with open(filename, "rb") as f:
                    while True:
                        block = f.read(4096)
                        if not block:
                            break

                        hash.update(block)

                return hash.digest()

            # ----------------------------------------------------------------------
            @contextlib.contextmanager
            def FileWriter(filename, mode):
                """\
                Method that writes to a temporary location and only copies to the intended
                destination if there are changes. This prevents full rebuilds (which are
                triggered based on timestamps) on files that haven't changed.
                """

                temp_filename = CurrentShell.CreateTempFilename()
                with open(temp_filename, mode) as f:
                    yield f

                if not os.path.isfile(filename) or CalcHash(
                        temp_filename) != CalcHash(filename):
                    FileSystem.RemoveFile(filename)
                    shutil.move(temp_filename, filename)
                else:
                    FileSystem.RemoveFile(temp_filename)

            # ----------------------------------------------------------------------

            this_dm.result = plugin.Generate(
                FileWriter,
                global_custom_structs,
                global_custom_enums,
                data,
                output_dir,
                this_dm.stream,
            )
            if this_dm.result != 0:
                return this_dm.result

        return dm.result
Example #15
0
    def Build( force=False,
               no_squash=False,
               keep_temporary_image=False,
               output_stream=sys.stdout,
               preserve_ansi_escape_sequences=False,
             ):
        with StreamDecorator.GenerateAnsiSequenceStream( output_stream,
                                                         preserve_ansi_escape_sequences=preserve_ansi_escape_sequences,
                                                       ) as output_stream:
            with StreamDecorator(output_stream).DoneManager( line_prefix='',
                                                             prefix="\nResults: ",
                                                             suffix='\n',
                                                           ) as dm:
                if not _VerifyDocker():
                    dm.stream.write("ERROR: Ensure that docker is installed and available within this environment.\n")
                    dm.result = -1

                    return dm.result

                output_dir = os.path.join(calling_dir, "Generated")

                source_dir = os.path.join(output_dir, "Source")
                base_image_dir = os.path.join(output_dir, "Images", "Base")
                activated_image_dir = os.path.join(output_dir, "Images", "Activated")

                image_code_base = "/usr/lib/CommonEnvironmentImage"
                image_code_dir = "{}/{}".format( image_code_base,
                                                 repository_name.replace('_', '/'),
                                               )

                if no_now_tag:
                    now_tag = None
                else:
                    now = time.localtime()
                    now_tag = "{0}.{1:02d}.{2:02d}".format(now[0], now[1], now[2])
                    
                # Create the base image
                dm.stream.write("Creating base image...")
                with dm.stream.DoneManager(suffix='\n') as base_dm:
                    FileSystem.MakeDirs(base_image_dir)

                    # Get the source
                    scm = GetAnySCM(calling_dir)
                    
                    if not os.path.isdir(source_dir):
                        base_dm.stream.write("Cloning source...")
                        with base_dm.stream.DoneManager() as this_dm:
                            # Ensure that the parent dir exists, but don't create the dir iteself.
                            FileSystem.MakeDirs(os.path.dirname(source_dir))
                    
                            # Enlist in the repo. 
                            temp_dir = CurrentShell.CreateTempDirectory()
                            FileSystem.RemoveTree(temp_dir)
                    
                            this_dm.result, output = scm.Clone(repository_uri, temp_dir)
                            if this_dm.result != 0:
                                this_dm.stream.write(output)
                                return this_dm.result
                    
                            os.rename(temp_dir, source_dir)
                    
                        has_changes = True
                    else:
                        # The repo exists
                        base_dm.stream.write("Updating source...")
                        with base_dm.stream.DoneManager() as this_dm:
                            this_dm.result, output = scm.Pull(source_dir)
                            if this_dm.result != 0:
                                this_dm.stream.write(output)
                                return this_dm.result
                    
                            has_changes = True
                    
                            if scm.Name == "Mercurial":
                                if "no changes found" in output:
                                    has_changes = False
                            elif scm.Name == "Git":
                                if "Already up-to-date" in output:
                                    has_changes = False
                            else:
                                assert False, "Unsupported SCM: {}".format(scm.Name)
                    
                            if has_changes:
                                this_dm.result, output = scm.Update(source_dir)
                                if this_dm.result != 0:
                                    this_dm.stream.write(output)
                                    return this_dm.result
                    
                    # Filter the source
                    filtered_source_dir = os.path.join(base_image_dir, "FilteredSource")

                    if os.path.isdir(filtered_source_dir) and not force and not has_changes:
                        base_dm.stream.write("No source changes were detected.\n")
                    else:
                        with base_dm.stream.SingleLineDoneManager( "Filtering source...",
                                                                 ) as this_dm:
                            temp_dir = CurrentShell.CreateTempDirectory()
                            FileSystem.RemoveTree(temp_dir)
                    
                            FileSystem.CopyTree( source_dir,
                                                 temp_dir,
                                                 excludes=[ "/.git",
                                                            "/.gitignore",
                                                            "/.hg",
                                                            "/.hgignore",
                    
                                                            "*/Generated",
                                                            "*/__pycache__",
                                                            "*/Windows",
                                                            "/*/src",
                    
                                                            "*.cmd",
                                                            "*.ps1",
                                                            "*.pyc",
                                                            "*.pyo",
                                                          ],
                                                 optional_output_stream=this_dm.stream,
                                               )
                    
                            FileSystem.RemoveTree(filtered_source_dir)
                    
                            os.rename(temp_dir, filtered_source_dir)
                    
                    base_dm.stream.write("Verifying Docker base image...")
                    with base_dm.stream.DoneManager() as this_dm:
                        this_dm.result, output = Process.Execute('docker image history "{}"'.format(base_docker_image))
                        if this_dm.result != 0:
                            this_dm.stream.write(output)
                            return this_dm.result
                    
                    base_dm.stream.write("Creating dockerfile...")
                    with base_dm.stream.DoneManager():
                        setup_statement = "./Setup.sh{}".format('' if not repository_setup_configurations else ' {}'.format(' '.join([ '"/configuration={}"'.format(configuration) for configuration in repository_setup_configurations ])))
                    
                        if repository_name == "Common_Environment":
                            commands = textwrap.dedent(
                                            """\
                                            RUN link /usr/bin/python3 /usr/bin/python
                    
                                            RUN adduser --disabled-password --disabled-login --gecos "" "{username}" \\
                                             && addgroup "{groupname}" \\
                                             && adduser "{username}" "{groupname}"
                    
                                            RUN cd {image_code_dir} \\
                                             && {setup_statement}
                    
                                            """).format( username=image_username,
                                                         groupname=image_groupname,
                                                         image_code_dir=image_code_dir,
                                                         setup_statement=setup_statement,
                                                       )
                        else:
                            import io
                    
                            with io.open( os.path.join(base_image_dir, "SetupEnvironmentImpl.sh"),
                                          'w',
                                          newline='\n',
                                        ) as f:
                                f.write(textwrap.dedent(
                                            """\
                                            #!/bin/bash
                                            . {image_code_base}/Common/Environment/Activate.sh python36
                                            cd {image_code_dir}
                                            {setup_statement}
                                            rm --recursive {image_code_base}/Common/Environment/Generated/Linux/Default
                                            """).format( image_code_base=image_code_base,
                                                         image_code_dir=image_code_dir,
                                                         setup_statement=setup_statement,
                                                       ))
                    
                            commands = textwrap.dedent(
                                            """\
                                            COPY SetupEnvironmentImpl.sh /tmp/SetupEnvironmentImpl.sh
                    
                                            RUN chmod a+x /tmp/SetupEnvironmentImpl.sh \\
                                             && /tmp/SetupEnvironmentImpl.sh
                                            """)
                    
                        with open(os.path.join(base_image_dir, "Dockerfile"), 'w') as f:
                            f.write(textwrap.dedent(
                                """\
                                FROM {base_image}
                    
                                COPY FilteredSource {image_code_dir}
                    
                                {commands}
                    
                                RUN chown -R {username}:{groupname} {image_code_dir} \\
                                 && chmod g-s {image_code_dir}/Generated/Linux \\
                                 && chmod 0750 {image_code_dir}/Generated/Linux \\
                                 && chmod -R o-rwx {image_code_dir}
                    
                                # Cleanup
                                RUN apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
                    
                                LABEL maintainer="{maintainer}"
                    
                                # By default, run a bash prompt as the source code user
                                WORKDIR {image_code_dir}
                                CMD [ "/sbin/my_init", "/sbin/setuser", "{username}", "bash" ]
                    
                                """).format( base_image=base_docker_image,
                                             commands=commands,
                                             username=image_username,
                                             groupname=image_groupname,
                                             image_code_dir=image_code_dir,
                                             maintainer=maintainer,
                                           ))
                    
                    base_dm.stream.write("Building Docker image...")
                    with base_dm.stream.DoneManager() as this_dm:
                        tags = [ "base",
                                 "base_latest",
                               ]

                        if now_tag:
                            tags.append("base_{}".format(now_tag))

                        command_line = 'docker build "{dir}" {tags}{squash}{force}' \
                                            .format( dir=base_image_dir,
                                                     tags=' '.join([ '--tag "{}:{}"'.format(docker_image_name, tag) for tag in tags ]),
                                                     squash='' if no_squash else " --squash",
                                                     force=" --no-cache" if force else '',
                                                   )
                        this_dm.result = Process.Execute(command_line, this_dm.stream)
                        if this_dm.result != 0:
                            return this_dm.result
                    
                if not no_activated_image:
                    # Create the activated image(s)
                    dm.stream.write("Creating activated image(s)...")
                    with dm.stream.DoneManager() as all_activated_dm:
                        for index, configuration in enumerate(repository_activation_configurations):
                            all_activated_dm.stream.write("Creating activated image{} ({} of {})...".format( '' if not configuration else " for the configuration '{}'".format(configuration),
                                                                                                             index + 1,
                                                                                                             len(repository_activation_configurations),
                                                                                                           ))
                            with all_activated_dm.stream.DoneManager(suffix='\n') as activated_dm:
                                this_activated_dir = os.path.join(activated_image_dir, configuration or "Default")
                                FileSystem.MakeDirs(this_activated_dir)

                                unique_id = str(uuid.uuid4())

                                temp_image_name = "{}_image".format(unique_id)
                                temp_container_name = "{}_container".format(unique_id)

                                # Activate the image so we can extract the changes
                                activated_dm.stream.write("Activating...")
                                with activated_dm.stream.DoneManager(suffix='\n') as this_dm:
                                    command_line = 'docker run -it --name "{container_name}" "{image_name}:base_latest" /sbin/my_init -- /sbin/setuser "{username}" bash -c "cd {image_code_dir} && . ./Activate.sh {configuration} && pushd {image_code_base}/Common/Environment && python -m RepositoryBootstrap.EnvironmentDiffs After /decorate' \
                                                        .format( container_name=temp_container_name,
                                                                 image_name=docker_image_name,
                                                                 configuration=configuration or '',
                                                                 username=image_username,
                                                                 image_code_dir=image_code_dir,
                                                                 image_code_base=image_code_base,
                                                               )

                                    sink = six.moves.StringIO()

                                    this_dm.result = Process.Execute(command_line, StreamDecorator([ sink, this_dm.stream, ]))
                                    if this_dm.result != 0:
                                        return this_dm.result

                                    sink = sink.getvalue()

                                activated_dm.stream.write("Extracting enviroment diffs...")
                                with activated_dm.stream.DoneManager():
                                    match = re.search( textwrap.dedent(
                                                            """\
                                                            //--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//
                                                            (?P<content>.+?)
                                                            //--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//
                                                            """),
                                                       sink,
                                                       re.DOTALL | re.MULTILINE,
                                                     )
                                    assert match, sink

                                    environment_diffs = json.loads(match.group("content"))

                                # ----------------------------------------------------------------------
                                def RemoveTempContainer():
                                    activated_dm.stream.write("Removing temp container...")
                                    with activated_dm.stream.DoneManager() as this_dm:
                                        this_dm.result, output = Process.Execute('docker rm "{}"'.format(temp_container_name))
                                        if this_dm.result != 0:
                                            this_dm.stream.write(output)

                                # ----------------------------------------------------------------------

                                with CallOnExit(RemoveTempContainer):
                                    # Commit the activated image
                                    activated_dm.stream.write("Committing container...")
                                    with activated_dm.stream.DoneManager() as this_dm:
                                        command_line = 'docker commit "{container_name}" "{image_name}"' \
                                                            .format( container_name=temp_container_name,
                                                                     image_name=temp_image_name,
                                                                   )

                                        this_dm.result, output = Process.Execute(command_line)
                                        if this_dm.result != 0:
                                            this_dm.stream.write(output)
                                            return this_dm.result

                                    # ----------------------------------------------------------------------
                                    def RemoveTempImage():
                                        if keep_temporary_image:
                                            return

                                        activated_dm.stream.write("Removing temp image...")
                                        with activated_dm.stream.DoneManager() as this_dm:
                                            this_dm.result, output = Process.Execute('docker rmi "{}"'.format(temp_image_name))
                                            if this_dm.result != 0:
                                                this_dm.stream.write(output)

                                    # ----------------------------------------------------------------------

                                    with CallOnExit(RemoveTempImage):
                                        # Create a new dockerfile. The temp image has all the harddrive changes
                                        # made during activation, but doesn't have the environment changes.
                                        activated_dm.stream.write("Creating dockerfile...")
                                        with activated_dm.stream.DoneManager() as this_dm:
                                            with open(os.path.join(this_activated_dir, "Dockerfile"), 'w') as f:
                                                f.write(textwrap.dedent(
                                                    """\
                                                    FROM {temp_image_name}

                                                    ENV {env}

                                                    # By default, run a bash prompt as the source code user
                                                    CMD [ "/sbin/my_init", "/sbin/setuser", "{username}", "bash" ]

                                                    LABEL maintainer="{maintainer}"

                                                    """).format( temp_image_name=temp_image_name,
                                                                 env='\\\n'.join([ '  {}={} '.format(k, v) for k, v in six.iteritems(environment_diffs) ]),
                                                                 image_code_dir=image_code_dir,
                                                                 maintainer=maintainer,
                                                                 username=image_username,
                                                               ))

                                        activated_dm.stream.write("Building Docker image...")
                                        with activated_dm.stream.DoneManager() as this_dm:
                                            tags = [ "latest",
                                                   ]

                                            if now_tag:
                                                tags.append(now_tag)

                                            if len(repository_activation_configurations) > 1:
                                                tags = [ "{}_{}".format(configuration, tag) for tag in tags ]
                                                tags.insert(0, configuration)

                                            command_line = 'docker build "{dir}" {tags}{squash}{force}' \
                                                                .format( dir=this_activated_dir,
                                                                         tags=' '.join([ '--tag "{}:{}"'.format(docker_image_name, tag) for tag in tags ]),
                                                                         squash='', # <squash is not supported here> '' if no_squash else " --squash",
                                                                         force=" --no-cache" if force else '',
                                                                       )

                                            this_dm.result = Process.Execute(command_line, this_dm.stream)
                                            if this_dm.result != 0:
                                                return this_dm.result
                                
                return dm.result
def _GenerateGlobalDefs(
    open_file_func,                 # callable(filename, mode) -> file-like context manager
    output_dir,                     # root dir; files are written under <root>/core/graph/featurizers_ops
    all_items,                      # list of item lists; items[0] supplies name/descriptions per featurizer
    all_type_mappings,              # list of (input_type_mappings, output_type_mappings) pairs, parallel to all_items
    all_custom_struct_data,         # list (parallel to all_items) of custom struct data dicts or None
    output_stream,                  # unused in the visible body; kept for interface parity with sibling generators
):
    """Generates 'featurizers_defs.h' and 'featurizers_defs.cc' (ONNX operator
    schema declarations/registrations) for every featurizer in `all_items`.

    Returns 0 on success.
    """
    output_dir = os.path.join(output_dir, "core", "graph", "featurizers_ops")
    FileSystem.MakeDirs(output_dir)

    # The header only declares the single registration entry point.
    with open_file_func(os.path.join(output_dir, "featurizers_defs.h"),
                        "w") as f:
        f.write(
            textwrap.dedent(
                """\
                // Copyright (c) Microsoft Corporation. All rights reserved.
                // Licensed under the MIT License.

                #pragma once

                namespace onnxruntime {
                namespace featurizers {

                void RegisterMSFeaturizersSchemas(void);

                }  // namespace featurizers
                }  // namespace onnxruntime
                """, ), )

    # Accumulates one 'void Register<Name>Ver1() {...}' C++ function per featurizer.
    func_definitions = []

    # ----------------------------------------------------------------------
    def CreateOutputStatement(
        output_type,
        output_documentation,
        index=0,
        output_name="Output",
    ):
        """Returns a C++ '.Output(index, name, doc, type)' schema fragment."""
        return textwrap.dedent(
            """\
            .Output(
                {},
                "{}",
                "{}",
                "{}")
            """, ).format(
                index, output_name, output_documentation
                or "No information is available", output_type)

    # ----------------------------------------------------------------------
    def CreateTypeInferenceConstraints(output_type_mappings):
        """Returns a C++ if/else-if chain that maps the input element type to
        the corresponding output element type (used inside a
        TypeAndShapeInferenceFunction lambda).
        """
        code = []
        constraint_format = (
            "input_elem_type == ONNX_NAMESPACE::TensorProto_DataType_{input_type_upper}"
        )

        # Whitespace used to align '||'-continued conditions in the emitted C++.
        constraint_whitespace_prefix = "                        "

        for index, (output_type,
                    input_types) in enumerate(output_type_mappings.items()):
            constraints = []
            for input_type in input_types:
                constraints.append(
                    constraint_format.format(
                        input_type_upper=input_type.upper(), ), )

            code.append(
                textwrap.dedent(
                    """\
                    {end}if ({constraints}) {{
                      propagateElemTypeFromDtypeToOutput(ctx, ONNX_NAMESPACE::TensorProto_DataType_{output_type_upper}, 0);
                    }}""".format(
                        end="" if index == 0 else " else ",
                        output_type_upper=output_type.upper(),
                        constraints=(" ||\n{}".format(
                            constraint_whitespace_prefix)).join(constraints),
                    ), ), )

            # 'else if' branches are wider than the initial 'if'; widen the
            # alignment prefix once after the first branch.
            if index == 0:
                constraint_whitespace_prefix += "       "

        # Trailing 'else' (type failure) plus shape propagation.
        code.append(
            textwrap.dedent(
                """\
                 else {
                  fail_type_inference("input 1 is expected to have an accepted type");
                }

                if (hasInputShape(ctx, 1)) {
                  propagateShapeFromInputToOutput(ctx, 1, 0);
                }
                """, ), )

        return "".join(code)

    # ----------------------------------------------------------------------
    def CreateMacro(
        input_type,
        input_documentation,
        output_statements,
        all_constraints,
        suffix=None,
    ):
        """Returns a complete MS_FEATURIZERS_OPERATOR_SCHEMA(...) invocation.

        NOTE: reads `transformer_name` from the enclosing per-featurizer loop
        iteration (late binding) — it must only be called inside that loop.
        """
        return textwrap.dedent(
            """\
            MS_FEATURIZERS_OPERATOR_SCHEMA({transformer_name})
                .SinceVersion(1)
                .SetDomain(kMSFeaturizersDomain)
                .SetDoc(doc)
                .Input(
                    0,
                    "State",
                    "State generated during training that is used for prediction",
                    "T0")
                .Input(
                    1,
                    "Input",
                    "{input_documentation}",
                    "{input_type}")
                {output_statements}
                .TypeConstraint(
                    "T0",
                    {{"tensor(uint8)"}},
                    "No information is available"){type_constraints}{suffix};
            """, ).format(
                transformer_name=transformer_name,
                input_documentation=StringHelpers.LeftJustify(
                    input_documentation or "No information is available",
                    8,
                ),
                input_type=input_type,
                output_statements=StringHelpers.LeftJustify(
                    "\n".join(output_statements).rstrip(),
                    4,
                ),
                type_constraints="\n    {}".format(
                    StringHelpers.LeftJustify(
                        "".join([
                            textwrap.dedent(
                                """\
                                .TypeConstraint(
                                    "{type_name}",
                                    {{{constraints}}},
                                    "No information is available")
                                """, ).format(
                                    type_name=type_name,
                                    constraints=", ".join([
                                        '"tensor({})"'.format(constraint)
                                        for constraint in constraints
                                    ], ),
                                ) for type_name, constraints in six.iteritems(
                                    all_constraints)
                        ], ).rstrip(),
                        4,
                    ), ) if all_constraints else "",
                suffix="\n    {}".format(
                    StringHelpers.LeftJustify(suffix.rstrip(), 4), )
                if suffix else "",
            )

    # ----------------------------------------------------------------------

    for items, type_mappings, custom_struct_data in zip(
            all_items,
            all_type_mappings,
            all_custom_struct_data,
    ):
        item = items[0]
        input_type_mappings, output_type_mappings = type_mappings

        transformer_name = item.name.replace("Featurizer", "Transformer")

        preprocessor_macros = []

        if _IsIdentityTypeMapping(input_type_mappings):
            # Identity mapping: input and output share the single constraint
            # "T", and the output type/shape are propagated from input 1.
            preprocessor_macros.append(
                CreateMacro(
                    "T",
                    item.input_description,
                    [CreateOutputStatement("T", item.output_description)],
                    {"T": list(six.iterkeys(input_type_mappings))},
                    suffix=textwrap.dedent(
                        """\
                        .TypeAndShapeInferenceFunction(
                            [](ONNX_NAMESPACE::InferenceContext& ctx) {
                              propagateElemTypeFromInputToOutput(ctx, 1, 0);
                              if (hasInputShape(ctx, 1)) {
                                propagateShapeFromInputToOutput(ctx, 1, 0);
                              }
                            })
                        """, ),
                ), )

        else:
            # We need to create one combined function for all output types
            all_output_types = []
            all_input_types = []

            for output_type, input_types in six.iteritems(
                    output_type_mappings):
                all_output_types.append(output_type)
                all_input_types += input_types

            type_constraints = OrderedDict()

            # Single input type: emit it literally; otherwise introduce the
            # "InputT" constraint covering all accepted input types.
            if len(all_input_types) == 1:
                input_type = "tensor({})".format(
                    _GetCppTypeMapping(
                        all_input_types[0],
                        recurse=False,
                    ), )
            else:
                input_type = "InputT"
                type_constraints[input_type] = all_input_types

            if (custom_struct_data is None
                    or all_output_types[0] not in custom_struct_data):
                if len(all_output_types) == 1:
                    # One output type: hard-code its element type in the
                    # inference function.
                    output_type = all_output_types[0]
                    output_statements = [
                        CreateOutputStatement(
                            "tensor({})".format(output_type),
                            item.output_description,
                        ),
                    ]
                    suffix = textwrap.dedent(
                        """\
                        .TypeAndShapeInferenceFunction(
                            [](ONNX_NAMESPACE::InferenceContext& ctx) {{
                              propagateElemTypeFromDtypeToOutput(ctx, ONNX_NAMESPACE::TensorProto_DataType_{output_type_upper}, 0);
                              if (hasInputShape(ctx, 1)) {{
                                propagateShapeFromInputToOutput(ctx, 1, 0);
                              }}
                            }})
                        """, ).format(output_type_upper=output_type.upper(), )
                else:
                    # Multiple output types: "OutputT" constraint plus an
                    # input-type-driven if/else-if chain for inference.
                    output_type = "OutputT"
                    type_constraints[output_type] = all_output_types
                    output_statements = [
                        CreateOutputStatement("OutputT",
                                              item.output_description),
                    ]
                    # NOTE: 'output_type_upper' is passed but the template has
                    # no matching placeholder; str.format ignores extra kwargs.
                    suffix = textwrap.dedent(
                        """\
                        .TypeAndShapeInferenceFunction(
                            [](ONNX_NAMESPACE::InferenceContext& ctx) {{
                              auto input_elem_type = ctx.getInputType(1)->tensor_type().elem_type();
                              {constraints}
                            }})
                        """, ).format(
                            output_type_upper=output_type.upper(),
                            constraints=StringHelpers.LeftJustify(
                                CreateTypeInferenceConstraints(
                                    output_type_mappings), 6),
                        )
            else:
                assert custom_struct_data
                assert len(custom_struct_data) == 1, custom_struct_data

                # NOTE(review): 'output_type' here is the last key left over
                # from the iteration above, while the guard checks
                # all_output_types[0]; these coincide only when there is a
                # single output type — verify against callers.
                (output_statements, custom_type_constraints,
                 suffix) = custom_struct_data[
                     output_type].GetDefOutputStatementsConstraintsAndSuffix()

                for k, v in six.iteritems(custom_type_constraints):
                    type_constraints[k] = v

            # Populate the content
            preprocessor_macros.append(
                CreateMacro(
                    input_type,
                    item.input_description,
                    output_statements,
                    type_constraints,
                    suffix=suffix,
                ), )

        func_definitions.append(
            textwrap.dedent(
                """\
                void Register{featurizer_name}Ver1() {{
                  static const char* doc = R"DOC(
                        {documentation}
                  )DOC";

                  {macros}
                }}
                """, ).format(
                    featurizer_name=items[0].name,
                    documentation=StringHelpers.LeftJustify(
                        item.description or "No information is available",
                        8,
                    ),
                    macros=StringHelpers.LeftJustify(
                        "\n".join(preprocessor_macros).rstrip(),
                        2,
                    ),
                ), )

    # The .cc file: schema macros, forward declarations, a single
    # RegisterMSFeaturizersSchemas() that calls every per-featurizer
    # registration function, and the function definitions themselves.
    with open_file_func(os.path.join(output_dir, "featurizers_defs.cc"),
                        "w") as f:
        f.write(
            textwrap.dedent(
                """\
                // Copyright (c) Microsoft Corporation. All rights reserved.
                // Licensed under the MIT License.

                #include "core/graph/constants.h"
                #include "core/graph/featurizers_ops/featurizers_defs.h"
                #include "core/graph/op.h"

                #include "onnx/defs/schema.h"
                #include "onnx/defs/shape_inference.h"

                #define MS_FEATURIZERS_OPERATOR_SCHEMA(name) MS_FEATURIZERS_OPERATOR_SCHEMA_UNIQ_HELPER(__COUNTER__, name)
                #define MS_FEATURIZERS_OPERATOR_SCHEMA_UNIQ_HELPER(Counter, name) MS_FEATURIZERS_OPERATOR_SCHEMA_UNIQ(Counter, name)

                #define MS_FEATURIZERS_OPERATOR_SCHEMA_UNIQ(Counter, name)       \\
                  static ONNX_NAMESPACE::OpSchemaRegistry::OpSchemaRegisterOnce( \\
                      op_schema_register_once##name##Counter) ONNX_UNUSED =      \\
                      ONNX_NAMESPACE::OpSchema(#name, __FILE__, __LINE__)

                #define MS_FEATURIZERS_OPERATOR_SCHEMA_ELSEWHERE(name, schema_func) MS_FEATURIZERS_OPERATOR_SCHEMA_UNIQ_HELPER_ELSEWHERE(__COUNTER__, name, schema_func)
                #define MS_FEATURIZERS_OPERATOR_SCHEMA_UNIQ_HELPER_ELSEWHERE(Counter, name, schema_func) MS_FEATURIZERS_OPERATOR_SCHEMA_UNIQ_ELSEWHERE(Counter, name, schema_func)

                #define MS_FEATURIZERS_OPERATOR_SCHEMA_UNIQ_ELSEWHERE(Counter, name, schema_func) \\
                  static ONNX_NAMESPACE::OpSchemaRegistry::OpSchemaRegisterOnce(                  \\
                      op_schema_register_once##name##Counter) ONNX_UNUSED =                       \\
                      schema_func(ONNX_NAMESPACE::OpSchema(#name, __FILE__, __LINE__))

                namespace onnxruntime {{
                namespace featurizers {{

                using ONNX_NAMESPACE::AttributeProto;
                using ONNX_NAMESPACE::OpSchema;
                using ONNX_NAMESPACE::OPTIONAL;

                // Forward declarations
                {forward_declarations}

                // ----------------------------------------------------------------------
                // ----------------------------------------------------------------------
                // ----------------------------------------------------------------------
                void RegisterMSFeaturizersSchemas() {{
                  {func_calls}
                }}

                // ----------------------------------------------------------------------
                // ----------------------------------------------------------------------
                // ----------------------------------------------------------------------
                {func_definitions}

                }}  // namespace featurizers
                }}  // namespace onnxruntime
                """, ).format(
                    forward_declarations="\n".join([
                        "static void Register{}Ver1();".format(items[0].name)
                        for items in all_items
                    ], ),
                    func_calls=StringHelpers.LeftJustify(
                        "\n".join([
                            "Register{}Ver1();".format(items[0].name)
                            for items in all_items
                        ], ),
                        2,
                    ),
                    func_definitions="\n".join(func_definitions).rstrip(),
                ), )

    return 0
Example #17
0
def Build(
    configuration,
    output_dir,
    release_build=False,
    prerelease_build_name=None,
    no_build_info=False,
    keep_temp_dir=False,
    cmake_generator=(
        None if os.getenv("DEVELOPMENT_ENVIRONMENT_REPOSITORY_CONFIGURATION") == "universal_linux" or os.getenv("DEVELOPMENT_ENVIRONMENT_CPP_USE_DEFAULT_CMAKE_GENERATOR") else "Ninja"
    ),
    output_stream=sys.stdout,
    verbose=False,
):
    """Builds the Featurizer Shared Library.

    Generates cmake files in a temporary directory, builds them, and copies
    the resulting binaries, data, and headers into `output_dir`.

    Args:
        configuration: cmake build type (passed as -DCMAKE_BUILD_TYPE).
        output_dir: destination directory; removed and recreated on each run.
        release_build: when True, builds without prerelease version info;
            mutually exclusive with `prerelease_build_name`.
        prerelease_build_name: prerelease tag embedded in the product version;
            defaults to "manual" for non-release builds.
        no_build_info: when True, the timestamp/configuration suffix is not
            appended to the prerelease name.
        keep_temp_dir: when True, the temporary cmake directory is preserved
            and its location written to the output stream.
        cmake_generator: cmake generator name (passed via -G); defaults to
            "Ninja" unless the environment opts out.
        output_stream: stream receiving progress output.
        verbose: when True, activity output is echoed live with an
            "INFO: " prefix instead of only on failure.

    Returns:
        0 on success; otherwise the first non-zero activity result.

    Raises:
        CommandLine.UsageException: when both `release_build` and
            `prerelease_build_name` are provided.
    """

    if release_build and prerelease_build_name:
        # BUGFIX: message previously read "prerelese"
        raise CommandLine.UsageException(
            "A prerelease build name cannot be provided with the 'release_build' flag",
        )

    with StreamDecorator(output_stream).DoneManager(
        line_prefix="",
        prefix="\nResults: ",
        suffix="\n",
    ) as dm:
        # Always start from a clean output directory
        FileSystem.RemoveTree(output_dir)
        FileSystem.MakeDirs(output_dir)

        temp_directory = CurrentShell.CreateTempDirectory()

        # ----------------------------------------------------------------------
        def CleanupTempDir():
            # Preserve the cmake working directory for debugging when requested
            if keep_temp_dir:
                dm.stream.write(
                    "\nCMake output has been written to '{}'.\n".format(temp_directory),
                )
                return

            FileSystem.RemoveTree(temp_directory)

        # ----------------------------------------------------------------------

        with CallOnExit(CleanupTempDir):
            prev_dir = os.getcwd()
            os.chdir(temp_directory)

            with CallOnExit(lambda: os.chdir(prev_dir)):
                if not release_build:
                    if prerelease_build_name is None:
                        # This value should compare as:
                        #   "manual" < "pipeline"
                        prerelease_build_name = "manual"

                    if not no_build_info:
                        # Append a sortable timestamp and the configuration so
                        # successive builds produce increasing version strings.
                        now = datetime.datetime.now()

                        prerelease_build_name = "{prerelease_build_name}.{year}.{month}.{day}.{hour}.{minute}.{second}.{configuration}".format(
                            year=now.year,
                            month=now.month,
                            day=now.day,
                            hour=now.hour,
                            minute=now.minute,
                            second=now.second,
                            prerelease_build_name=prerelease_build_name,
                            configuration=configuration.lower(),
                        )

                # Each activity is (display name, shell command or callable).
                # NOTE: removed an unused 'temp_dir=' format kwarg from the
                # cmake command construction (the template has no matching
                # placeholder); cmake runs from temp_directory via the chdir
                # above.
                activities = [
                    (
                        "Generating cmake Files",
                        'cmake {generator}-DCMAKE_BUILD_TYPE={configuration} {prerelease_build_name} "{this_dir}"'.format(
                            generator='-G "{}" '.format(
                                cmake_generator,
                            ) if cmake_generator else "",
                            configuration=configuration,
                            this_dir=_script_dir,
                            prerelease_build_name="" if not prerelease_build_name else "-DPRODUCT_VERSION_PRERELEASE_INFO={}".format(
                                prerelease_build_name,
                            ),
                        ),
                    ),
                    ("Building", "cmake --build ."),
                ]

                if (
                    os.getenv("DEVELOPMENT_ENVIRONMENT_REPOSITORY_CONFIGURATION")
                    == "universal_linux"
                ):
                    activities.append(
                        (
                            "Verifying Universal Linux Binaries",
                            "libcheck libFeaturizers.so",
                        ),
                    )

                activities += [
                    ("Copying Binaries", _CopyBinaries),
                    ("Copying Data", _CopyData),
                    ("Copying Headers", _CopyHeaders),
                ]

                for index, (activity, command_line) in enumerate(activities):
                    dm.stream.write(
                        "{} ({} of {})...".format(activity, index + 1, len(activities)),
                    )
                    with dm.stream.DoneManager(
                        suffix="\n" if verbose else None,
                    ) as this_dm:
                        # Capture all output so it can be replayed on failure
                        sink = six.moves.StringIO()

                        output_streams = [sink]

                        if verbose:
                            output_streams.append(
                                StreamDecorator(
                                    this_dm.stream,
                                    line_prefix="INFO: ",
                                ),
                            )

                        this_output_stream = StreamDecorator(output_streams)

                        # Activities are either Python callables (copy steps)
                        # or shell command lines.
                        if callable(command_line):
                            this_dm.result = command_line(
                                temp_directory,
                                output_dir,
                                this_output_stream,
                            )
                        else:
                            this_dm.result = Process.Execute(
                                command_line,
                                this_output_stream,
                            )

                        if this_dm.result != 0:
                            if not verbose:
                                # Replay captured output only on failure
                                this_dm.stream.write(sink.getvalue())

                            return this_dm.result

        return dm.result
Example #18
0
def Package(
    output_dir,                     # where the .nuspec is written and nuget.exe is run
    build_dir,                      # one or more build output directories (plural despite the name)
    output_stream=sys.stdout,
    verbose=False,                  # currently unread in the visible body
):
    """Packages previously built content"""

    # The command-line layer passes a list; rename for clarity.
    build_dirs = build_dir
    del build_dir

    with StreamDecorator(output_stream).DoneManager(
        line_prefix="",
        prefix="\nResults: ",
        suffix="\n",
    ) as dm:
        # With multiple build dirs, the shared metadata and data must match
        # across all of them before they can be combined into one package.
        if len(build_dirs) > 1:
            dm.stream.write("Ensuring that build data matches...")
            with dm.stream.DoneManager() as ensure_dm:
                ensure_dm.stream.write("Checking '{}'...".format(JSON_FILENAME))
                with ensure_dm.stream.DoneManager() as this_dm:
                    this_dm.result = (
                        0
                        if _CompareFiles(
                            this_dm.stream,
                            *[
                                os.path.join(build_dir, JSON_FILENAME)
                                for build_dir in build_dirs
                            ]
                        )
                        else -1
                    )
                    if this_dm.result != 0:
                        return this_dm.result

                ensure_dm.stream.write("Checking 'Data' directories...")
                with ensure_dm.stream.DoneManager() as this_dm:
                    this_dm.result = (
                        0
                        if _CompareDirectories(
                            this_dm.stream,
                            *[os.path.join(build_dir, "Data") for build_dir in build_dirs]
                        )
                        else -1
                    )
                    if this_dm.result != 0:
                        return this_dm.result

        dm.stream.write("Reading build configuration...")
        with dm.stream.DoneManager() as this_dm:
            # The first build dir's config is authoritative (all were verified
            # equal above when there are several).
            json_filename = os.path.join(build_dirs[0], JSON_FILENAME)
            if not os.path.isfile(json_filename):
                this_dm.stream.write(
                    "ERROR: The filename '{}' does not exist.\n".format(json_filename),
                )
                this_dm.result = -1

                return this_dm.result

            with open(json_filename) as f:
                build_config = json.load(f)

            # Derive the remaining nuspec template fields.
            build_config["build_dir"] = build_dirs[0]
            build_config["data_dir"] = os.path.join(build_dirs[0], "Data", "**", "*.*")
            build_config["package_id"] = build_config["product_name"].replace(" ", ".")
            build_config["product_copyright"] = build_config["product_copyright"].replace(
                "(C)",
                "©",
            )

        # Generate the correct nuget file statements based on output in the build_dir
        dm.stream.write("Generating nuget file statements...")
        with dm.stream.DoneManager() as this_dm:
            # Maps a nuget runtime target (e.g. "runtimes/win-x64/native")
            # to the files from the build dir that produced it.
            nuget_file_statements = {}

            for build_dir in build_dirs:
                these_files = []
                value_type = None

                for item in os.listdir(build_dir):
                    this_value_type = None

                    # Classify each artifact by platform from its filename.
                    if item == "Featurizers.dll":
                        if "x86" in build_dir:
                            this_value_type = "runtimes/win-x86/native"
                        else:
                            this_value_type = "runtimes/win-x64/native"

                    elif item.startswith("libFeaturizers.so"):
                        this_value_type = "runtimes/linux-x64/native"

                    else:
                        name, ext = os.path.splitext(item)
                        if name.startswith("libFeaturizers") and ext == ".dylib":
                            this_value_type = "runtimes/osx-x64/native"

                    if this_value_type is not None:
                        # A single build dir must not contain artifacts for
                        # more than one platform.
                        assert value_type is None or this_value_type == value_type, (
                            value_type,
                            item,
                            this_value_type,
                        )

                        value_type = this_value_type
                        these_files.append(os.path.join(build_dir, item))

                # Two build dirs targeting the same platform is an error.
                if value_type in nuget_file_statements:
                    this_dm.stream.write(
                        "ERROR: The build directory '{}' overwrites previously captured content ({}: '{}').\n".format(
                            build_dir,
                            value_type,
                            nuget_file_statements[value_type],
                        ),
                    )
                    this_dm.result = -1

                    return this_dm.result

                nuget_file_statements[value_type] = these_files

            file_statements = []

            for k, v in six.iteritems(nuget_file_statements):
                for filename in v:
                    file_statements.append(
                        '<file src="{}" target="{}" />'.format(filename, k),
                    )

            build_config["file_statements"] = "\n".join(file_statements)

        FileSystem.MakeDirs(output_dir)

        dm.stream.write("Writing nuspec file...")
        with dm.stream.DoneManager():
            nuspec_filename = os.path.join(output_dir, "Featurizers.nuspec")
            with open(nuspec_filename, "w") as f:
                f.write(_nuget_template.format(**build_config))

        dm.stream.write("Running nuget...")
        with dm.stream.DoneManager() as this_dm:
            # nuget writes its package next to the cwd; run from output_dir
            # and restore the previous cwd afterwards.
            prev_dir = os.getcwd()

            os.chdir(output_dir)
            with CallOnExit(lambda: os.chdir(prev_dir)):
                this_dm.result = Process.Execute(
                    'nuget.exe pack "{}"'.format(nuspec_filename),
                    this_dm.stream,
                )
                if this_dm.result != 0:
                    return this_dm.result

        return dm.result
def _GenerateKernel(
    open_file_func,
    output_dir,
    items,
    input_type_mappings,
    output_type_mappings,
    custom_struct_data,
    status_stream,
):
    """Generates a C++ ONNX Runtime CPU kernel (.cc) source file for a featurizer.

    The generated file defines an OpKernel named after the transformer (the
    featurizer's name with "Featurizer" replaced by "Transformer") and
    registers it via ONNX_OPERATOR_KERNEL_EX.

    Args:
        open_file_func: `open`-like callable used to create the output file
            (lets callers intercept/virtualize file I/O).
        output_dir: Root output directory; the file is written under
            `<output_dir>/featurizers_ops/cpu`.
        items: Non-empty list of featurizer descriptions; all items must agree
            on `is_output_a_template`, and `items[0]` drives generation.
        input_type_mappings: Mapping whose keys are the supported input type
            names. One entry -> plain (non-templated) kernel; multiple
            entries -> templated Impl dispatched with MLTypeCallDispatcher.
        output_type_mappings: Mapping of output type name -> list of input
            type names it corresponds to. Multiple entries produce an
            OutputTypeMapper trait specialization per input type.
        custom_struct_data: Optional mapping of output type -> helper object
            providing GetKernelInitializeAssignAndPreprocessorStatements;
            used when the output is a custom struct rather than a tensor.
        status_stream: Stream used for warning/status messages.

    Returns:
        0 on success.
    """
    # All generated CPU kernels live under featurizers_ops/cpu
    output_dir = os.path.join(output_dir, "featurizers_ops", "cpu")
    FileSystem.MakeDirs(output_dir)

    # Every overload must agree on whether the output type participates in
    # the template signature
    assert all(item.is_output_a_template == items[0].is_output_a_template
               for item in items)

    item = items[0]
    transformer_name = item.name.replace("Featurizer", "Transformer")

    prefix_statements = ""
    template_parameters = ""
    # Collected template args; rendered as "<...>" (or "") below
    template_suffix = []

    if len(input_type_mappings) == 1:
        if len(output_type_mappings) != 1:
            # TODO: Implement this!
            status_stream.write(
                "Multiple output types is not supported yet ({}) - restricting output to the first type (this is not a good long term solution and should be addressed as soon as possible)\n"
                .format(item.name))

            # Restrict to the first output type only (see warning above)
            key = next(six.iterkeys(output_type_mappings))
            output_type_mappings = {key: output_type_mappings[key]}

        input_type = _GetCppTypeMapping(next(
            six.iterkeys(input_type_mappings)))
        input_types = []

        # We don't need to define an Impl class Since there is only 1 input type
        class_content_template = textwrap.dedent(
            """\
            class {transformer_name} final : public OpKernel {{
             public:
              explicit {transformer_name}(const OpKernelInfo& info) : OpKernel(info) {{
              }}

              Status Compute(OpKernelContext* ctx) const override {{
                {content}

                return Status::OK();
              }}
            }};
            """, )

        # Single input type -> a concrete TypeConstraint on "T1"
        additional_constraints = '.TypeConstraint("T1", DataTypeImpl::GetTensorType<{}>())'.format(
            input_type)

    else:
        # Multiple input types: generate a templated Impl struct and dispatch
        # on the runtime element type of input tensor 1
        input_type = "T" if _IsIdentityTypeMapping(
            input_type_mappings) else "InputT"
        input_types = [
            _GetCppTypeMapping(input_type)
            for input_type in six.iterkeys(input_type_mappings)
        ]

        template_parameters = "template <typename {}>\n".format(input_type)
        template_suffix.append(input_type)

        class_content_template = textwrap.dedent(
            """\
            {template_parameters}struct {transformer_name}Impl {{
              void operator()(OpKernelContext* ctx) const {{
                {content}
              }}
            }};

            class {transformer_name} final : public OpKernel {{
             public:
              explicit {transformer_name}(const OpKernelInfo& info) : OpKernel(info) {{
              }}

              Status Compute(OpKernelContext* ctx) const override {{
                utils::MLTypeCallDispatcher<{transformer_name}Impl, {input_types}> t_disp(ctx->Input<Tensor>(1)->GetElementType());
                t_disp.Invoke(ctx);
                return Status::OK();
              }}
            }};
            """, )

        # One TypeConstraint listing every supported tensor type, justified to
        # line up under the opening brace of the constraint list
        additional_constraints = textwrap.dedent(
            """\
            .TypeConstraint("{input_type}", {{{constraints}}})
            """, ).format(
                input_type=input_type,
                constraints=StringHelpers.LeftJustify(
                    ",\n".join([
                        "DataTypeImpl::GetTensorType<{}>()".format(it)
                        for it in input_types
                    ]),
                    len('.TypeConstraint("{}", {{'.format(input_type)),
                ),
            ).rstrip()

    if len(output_type_mappings) == 1:
        output_type = next(six.iterkeys(output_type_mappings))
        output_type = _GetCppTypeMapping(output_type)
    else:
        # Multiple output types: emit an OutputTypeMapper trait with one
        # specialization per (input type -> output type) pair
        overrides = []

        for output_type, mapped_input_types in six.iteritems(
                output_type_mappings):
            output_type = _GetCppTypeMapping(output_type)

            for mapped_input_type in mapped_input_types:
                mapped_input_type = _GetCppTypeMapping(mapped_input_type)

                overrides.append(
                    textwrap.dedent(
                        """\
                        template <>
                        struct OutputTypeMapper<{input_type}> {{ using type = {output_type}; }};
                        """, ).format(
                            input_type=mapped_input_type,
                            output_type=output_type,
                        ), )

        prefix_statements = textwrap.dedent(
            """\
            template <typename T>
            struct OutputTypeMapper {{}};
            {}
            """, ).format("".join(overrides))

        output_type = "typename OutputTypeMapper<{}>::type".format(input_type)

        if item.is_output_a_template:
            template_suffix.append(output_type)

    if template_suffix:
        template_suffix = "<{}>".format(", ".join(template_suffix))
    else:
        template_suffix = ""

    # Expression applied to each element before it is passed to the transformer
    input_transformation_statement = "input_data[i]"

    if item.input_type in _cpp_input_type_transformations:
        input_transformation_statement = _cpp_input_type_transformations[
            item.input_type](input_transformation_statement)

    if item.is_input_optional:
        prefix_statements += textwrap.dedent(
            """\
            inline float const& PreprocessOptional(float const& value) { return value; }
            inline double const& PreprocessOptional(double const& value) { return value; }
            inline nonstd::optional<std::string> PreprocessOptional(std::string value) {
              return value.empty() ? nonstd::optional<std::string>() : nonstd::optional<std::string>(std::move(value));
            }
            """, )

        # We do not want a move here, as the floats and doubles don't gain anything from a move,
        # and the string will be copied to optional anyway.
        input_transformation_statement = "PreprocessOptional({})".format(
            input_transformation_statement)

    if custom_struct_data is None or output_type not in custom_struct_data:
        # Standard tensor output: allocate an output tensor shaped like the
        # input and assign element-by-element
        prepare_output_statements = [
            textwrap.dedent(
                """\
                Tensor* output_tensor(ctx->Output(0, input_tensor->Shape()));
                {output_type}* output_data(output_tensor->MutableData<{output_type}>());
                """, ).format(output_type=_GetCppTypeMapping(output_type), ),
        ]

        output_statements = [
            "output_data[i] = transformer.execute({});".format(
                input_transformation_statement, )
        ]

    else:
        # If the output is a custom struct, we assume that all outputs will be that custom struct
        assert len(output_type_mappings) == 1, output_type_mappings
        assert custom_struct_data
        assert len(custom_struct_data) == 1, custom_struct_data

        (
            prepare_output_statements,
            output_statements,
        ) = next(six.itervalues(custom_struct_data),
                 ).GetKernelInitializeAssignAndPreprocessorStatements(
                     transformer_name,
                     input_transformation_statement,
                 )

    # Render the Compute() body: deserialize transformer state from input 0,
    # read input 1, then transform every element
    class_content = class_content_template.format(
        transformer_name=transformer_name,
        template_parameters=template_parameters,
        input_types=", ".join(input_types),
        content=StringHelpers.LeftJustify(
            textwrap.dedent(
                """\
                // Create the transformer
                Microsoft::Featurizer::Featurizers::{transformer_name}{template_suffix} transformer(
                  [ctx](void) {{
                    const auto* state_tensor(ctx->Input<Tensor>(0));
                    const uint8_t* const state_data(state_tensor->Data<uint8_t>());

                    Microsoft::Featurizer::Archive archive(state_data, state_tensor->Shape().GetDims()[0]);
                    return Microsoft::Featurizer::Featurizers::{transformer_name}{template_suffix}(archive);
                  }}());

                // Get the input
                const auto* input_tensor(ctx->Input<Tensor>(1));
                const {input_type}* input_data(input_tensor->Data<{input_type}>());

                // Prepare the output
                {prepare_output_statements}

                // Execute
                const int64_t length(input_tensor->Shape().Size());

                for (int64_t i = 0; i < length; ++i) {{
                  {output_statements}
                }}
                """, ).format(
                    transformer_name=transformer_name,
                    template_suffix=template_suffix,
                    input_type=input_type,
                    prepare_output_statements="\n".join(
                        prepare_output_statements).rstrip(),
                    output_statements=StringHelpers.LeftJustify(
                        "\n".join(output_statements).rstrip(), 2),
                ).rstrip(),
            4,
        ),
    )

    # Write the file as <snake_case transformer name>.cc
    with open_file_func(
            os.path.join(
                output_dir,
                "{}.cc".format(
                    "_".join(re.findall("[a-zA-Z][^A-Z]*",
                                        transformer_name)).lower(), ),
            ),
            "w",
    ) as f:
        f.write(
            textwrap.dedent(
                """\
                // Copyright (c) Microsoft Corporation. All rights reserved.
                // Licensed under the MIT License.

                #include "core/common/common.h"
                #include "core/framework/data_types.h"
                #include "core/framework/data_types_internal.h"
                #include "core/framework/op_kernel.h"

                #include "Featurizers/{featurizer_name}.h"
                #include "Archive.h"

                namespace onnxruntime {{
                namespace featurizers {{

                {prefix_statements}{class_content}

                ONNX_OPERATOR_KERNEL_EX(
                    {transformer_name},
                    kMSFeaturizersDomain,
                    1,
                    kCpuExecutionProvider,
                    KernelDefBuilder()
                        .TypeConstraint("T0", DataTypeImpl::GetTensorType<uint8_t>())
                        {additional_constraints},
                    {transformer_name});

                }}  // namespace featurizers
                }}  // namespace onnxruntime
                """, ).format(
                    featurizer_name=items[0].name,
                    transformer_name=transformer_name,
                    prefix_statements="{}\n\n".format(
                        prefix_statements.rstrip())
                    if prefix_statements else "",
                    class_content=class_content.rstrip(),
                    additional_constraints=StringHelpers.LeftJustify(
                        additional_constraints, 8),
                ), )

    return 0
    def Callback(test_lock, configuration, build_dir, output_stream, on_status_update):
        """Generates build output into `build_dir`, then optionally builds and tests it.

        NOTE(review): relies on names captured from the enclosing scope --
        `force`, `command_line_template`, `build`, and `test` -- plus the
        module helpers `_PrintHeader`, `_BuildImpl`, and `_TestImpl`; confirm
        their semantics against the enclosing function.

        Args:
            test_lock: Lock used to serialize test execution across invocations.
            configuration: Configuration name substituted into the command line.
            build_dir: Directory that receives the generated output.
            output_stream: Stream receiving headers and subprocess output.
            on_status_update: Callable invoked with short progress strings.

        Returns:
            0 on success; a non-zero error code on failure.
        """
        on_status_update("Generating")
        _PrintHeader("Generate Output", output_stream)

        # Refuse to overwrite an existing output dir unless `force` (closure
        # variable) was specified
        if os.path.isdir(build_dir):
            if not force:
                output_stream.write(
                    "The output dir '{}' already exists and will not be overwritten.\n".format(
                        build_dir,
                    ),
                )
                return 1

            FileSystem.RemoveTree(build_dir)

        FileSystem.MakeDirs(build_dir)

        # Run the generator command (e.g. cmake) for this configuration
        result = Process.Execute(
            command_line_template.format(
                build_dir=build_dir,
                configuration=configuration,
            ),
            output_stream,
        )
        if result != 0:
            return result

        # Create a python file that can be used to clean the directory
        existing_items = os.listdir(build_dir)
        assert existing_items

        # The generated Clean.py removes anything added after generation;
        # '{{'/'}}' below are escaped braces that survive the .format call
        with open(os.path.join(build_dir, "Clean.py"), "w") as f:
            f.write(
                textwrap.dedent(
                    """\
                    #!/usr/bin/env python

                    import os
                    import sys

                    import CommonEnvironment
                    from CommonEnvironment import CommandLine
                    from CommonEnvironment import FileSystem
                    from CommonEnvironment.StreamDecorator import StreamDecorator

                    # ----------------------------------------------------------------------
                    _script_fullpath                            = CommonEnvironment.ThisFullpath()
                    _script_dir, _script_name                   = os.path.split(_script_fullpath)
                    # ----------------------------------------------------------------------

                    @CommandLine.EntryPoint
                    @CommandLine.Constraints(
                        output_stream=None,
                    )
                    def EntryPoint(
                        all=False,
                        output_stream=sys.stdout,
                    ):
                        with StreamDecorator(output_stream).DoneManager(
                            line_prefix="",
                            prefix="\\nResults: ",
                            suffix="\\n",
                        ) as dm:
                            existing_items = set([{existing_items_list}])

                            for item in os.listdir(_script_dir):
                                if item in existing_items or item == _script_name:
                                    continue

                                fullpath = os.path.join(_script_dir, item)

                                dm.stream.write("Removing '{{}}'...".format(fullpath))
                                with dm.stream.DoneManager():
                                    FileSystem.RemoveItem(fullpath)

                            cmake_dirs = os.path.join(_script_dir, "CMakeFiles")

                            if all:
                                dm.stream.write("Removing '{{}}'...".format(cmake_dirs))
                                with dm.stream.DoneManager():
                                    FileSystem.RemoveTree(cmake_dirs)

                            else:
                                dirs_to_delete = []

                                for fullpath, _ in FileSystem.WalkDirs(
                                    cmake_dirs,
                                    include_dir_names=[lambda name: os.path.splitext(name)[1] == ".dir"],
                                ):
                                    dirs_to_delete.append(fullpath)

                                for dir_to_delete in dirs_to_delete:
                                    dm.stream.write("Removing '{{}}'...".format(dir_to_delete))
                                    with dm.stream.DoneManager():
                                        FileSystem.RemoveTree(dir_to_delete)

                            return dm.result


                    # ----------------------------------------------------------------------
                    # ----------------------------------------------------------------------
                    # ----------------------------------------------------------------------
                    if __name__ == "__main__":
                        try:
                            sys.exit(CommandLine.Main())
                        except KeyboardInterrupt:
                            pass
                    """,
                ).format(
                    existing_items_list=", ".join(
                        ['"{}"'.format(existing_item) for existing_item in existing_items],
                    ),
                ),
            )

        # Optionally build (closure variable `build`)
        if build:
            on_status_update("Building")
            _PrintHeader("Build Output", output_stream)

            result = _BuildImpl(build_dir, output_stream)
            if result != 0:
                return result

        # Optionally test (closure variable `test`); serialized via test_lock
        if test:
            on_status_update("Testing (Waiting)")
            _PrintHeader("Test Output", output_stream)

            with test_lock:
                on_status_update("Testing")

                result = _TestImpl(build_dir, output_stream)
                if result != 0:
                    return result

        return 0
# Example #21
def Main( config,
          original_args=sys.argv,                       # <Dangerous default value> pylint: disable = W0102
          command_line_arg_prefix='/',
          command_line_keyword_separator='=',
          command_line_dict_tag_value_separator=':',
          verbose=False,
          output_stream=sys.stdout,
        ):
    """Method called in a build file's entry point.

    Resolves the required 'Build' and 'Clean' entry points from the calling
    module, synthesizes 'Rebuild' and 'Metadata' entry points, and then
    dispatches to the command line executor from the build file's directory.
    """

    assert config
    assert original_args
    assert command_line_arg_prefix
    assert command_line_keyword_separator
    assert command_line_dict_tag_value_separator
    assert output_stream

    # Some build file functions are required, others are not
    pending = {name.lower(): name for name in ("Build", "Clean")}
    resolved = {}

    entry_points = CommandLine.EntryPointInformation.FromModule(sys.modules["__main__"])

    for entry_point in entry_points:
        lowered = entry_point.Name.lower()

        if lowered in pending:
            # Wire the required function up to its canonical redirection and
            # remember the entry point for Rebuild generation below
            _RedirectEntryPoint(pending.pop(lowered), entry_point, config)
            resolved[lowered] = entry_point
            continue

        # 'Rebuild' and 'Metadata' are synthesized here and may not be
        # defined by the build file itself
        for reserved_name in ("Rebuild", "Metadata"):
            if lowered == reserved_name.lower():
                raise Exception("The name '{}' is reserved and will be automatically generated".format(reserved_name))

    if pending:
        raise Exception("These methods must be defined: {}".format(', '.join(pending)))

    entry_points.append(
        CommandLine.EntryPointInformation.FromFunction(
            _GenerateRebuild(
                resolved["build"],
                resolved["clean"],
                config,
            ),
        ),
    )

    # ----------------------------------------------------------------------
    @CommandLine.EntryPoint
    def Metadata():
        sys.stdout.write(str(config))

    # ----------------------------------------------------------------------

    entry_points.append(CommandLine.EntryPointInformation.FromFunction(Metadata))

    config = CompleteConfiguration(
        [entry_point.Name for entry_point in entry_points],
        config,
    )

    script_description_suffix = None
    if config.Configurations:
        script_description_suffix = "    Where <configuration> can be:\n\n{}\n".format(
            '\n'.join("        - {}".format(cfg) for cfg in config.Configurations),
        )

    # Execute from the directory that contains the calling build file
    stack_frame = inspect.stack()[-1]

    current_dir = os.getcwd()
    os.chdir(os.path.dirname(os.path.abspath(stack_frame[1])))

    with CallOnExit(lambda: os.chdir(current_dir)):
        # Ensure that an output directory is created prior to invoking build functionality
        if config.RequiresOutputDir and len(original_args) >= 2 and original_args[1].lower() in ("build", "rebuild"):
            # The <output_dir> argument position depends on whether a
            # <configuration> argument precedes it:
            #   <script> build <config> <output_dir> ...   (with configurations)
            #   <script> build <output_dir> ...            (without)
            output_dir_index = 3 if config.Configurations else 2

            output_dir = None
            if len(original_args) > output_dir_index:
                output_dir = original_args[output_dir_index]

            if output_dir:
                FileSystem.MakeDirs(output_dir)

        return CommandLine.Executor(
            args=original_args,
            command_line_arg_prefix=command_line_arg_prefix,
            command_line_keyword_separator=command_line_keyword_separator,
            command_line_dict_tag_value_separator=command_line_dict_tag_value_separator,
            script_description=inspect.getmodule(stack_frame[0]).__doc__ or '',
            script_description_suffix=script_description_suffix,
            entry_points=entry_points,
        ).Invoke(
            verbose=verbose,
            output_stream=output_stream,
        )