def Build(output_stream=sys.stdout):
    """Compiles the SimpleSchema ANTLR grammar (SimpleSchema.g4) into Python3 visitor code."""

    with StreamDecorator(output_stream).DoneManager(
        line_prefix="",
        prefix="\nResults: ",
        suffix="\n",
    ) as dm:
        grammar_filename = os.path.join(_script_dir, "..", "SimpleSchema.g4")
        assert os.path.isfile(grammar_filename), grammar_filename

        generated_dir = os.path.join(_script_dir, "..", "GeneratedCode")

        # Delegate the actual compilation to the ANTLR wrapper script
        antlr_command = '{script} Compile Python3 -o "{output_dir}" -no-listener -visitor "{input_file}"'.format(
            input_file=grammar_filename,
            output_dir=generated_dir,
            script=CurrentShell.CreateScriptName("ANTLR"),
        )

        dm.result = Process.Execute(antlr_command, dm.stream)
        return dm.result
def test_Invoke(self):
    """Verifies that a simple_schema_include statement pulls the included file's items into the root."""

    temp_schema = CurrentShell.CreateTempFilename(".SimpleSchema")

    with open(temp_schema, "w") as f:
        f.write(
            textwrap.dedent(
                """\
                <a_string string>
                """,
            ),
        )

    # Remove the temp file even if the assertions below fail
    with CallOnExit(lambda: os.remove(temp_schema)):
        root = _Invoke("simple_schema_include('{}')".format(temp_schema))

        self.assertEqual(len(root.items), 1)
        self.assertEqual(root.items[0].name, "a_string")
def GetItems(directory, ignore_set):
    """Returns the full paths of items in `directory`, skipping names in `ignore_set`,
    symlinks, and (on Linux) python binaries within the python bin dir.
    """

    assert os.path.isdir(directory), directory

    bin_subdirs = PythonActivationActivity.BinSubdirs
    is_bin_dir = bin_subdirs and directory.endswith(os.path.join(*bin_subdirs))

    results = []

    for name in os.listdir(directory):
        if name in ignore_set:
            continue

        full_path = os.path.join(directory, name)

        # Symlinked items are never returned
        if CurrentShell.IsSymLink(full_path):
            continue

        # On Linux, the python executables in the bin dir are owned by the
        # environment itself and are excluded as well.
        if CurrentShell.CategoryName == "Linux" and is_bin_dir and name.startswith("python"):
            continue

        results.append(full_path)

    return results
def _BuildGenerator(
    source_dir,
    configuration,
    generator=_DEFAULT_GENERATOR,
):
    """Generator: cmake-configures (and, on success, builds) `source_dir` in a temporary
    directory, yielding (build_dir, result, output). The temp dir is removed afterward.
    """

    build_dir = CurrentShell.CreateTempDirectory()

    with CallOnExit(lambda: FileSystem.RemoveTree(build_dir)):
        if generator:
            generator_fragment = '-G "{}" '.format(generator)
        else:
            generator_fragment = ""

        configure_command = 'cmake {generator}-S "{source_dir}" -B "{build_dir}" -DCppCommon_CMAKE_DEBUG_OUTPUT=On -DCMAKE_BUILD_TYPE={config}'.format(
            generator=generator_fragment,
            source_dir=source_dir,
            build_dir=build_dir,
            config=configuration,
        )

        result, output = Process.Execute(configure_command)

        # Only build if configuration succeeded
        if result == 0:
            result, output = Process.Execute('cmake --build "{}"'.format(build_dir))

        yield build_dir, result, output
def _InvokeImpl(cls, invoke_reason, context, status_stream, verbose_stream, verbose):
    """
    Writes the generated script content for `context` to a temporary .py file and
    compiles it, returning the compiler's result code.

    The temp file is removed on success unless context["preserve_temp_dir"] is set;
    on failure it is intentionally left on disk so it can be inspected.
    """

    with status_stream.DoneManager(
        associated_stream=verbose_stream) as (this_dm, this_verbose_stream):
        generated_python_context = cls._GenerateScriptContent(context)
        assert generated_python_context

        temp_filename = CurrentShell.CreateTempFilename(".py")

        with open(temp_filename, 'w') as f:
            f.write(generated_python_context)

        # Decide up front how the temp file will be disposed of
        if context["preserve_temp_dir"]:
            this_dm.stream.write("Writing to '{}'\n".format(temp_filename))
            cleanup_func = lambda: None
        else:
            cleanup_func = lambda: os.remove(temp_filename)

        try:
            # Capture compile output so it can be replayed on failure when not
            # running verbosely (verbose mode already streamed it live).
            sink = six.moves.StringIO()

            this_dm.result = cls._Compile(
                context,
                temp_filename,
                StreamDecorator([
                    sink,
                    this_verbose_stream,
                ]))

            if this_dm.result != 0:
                if not verbose:
                    this_dm.stream.write(sink.getvalue())

            return this_dm.result
        finally:
            # Clean up only on success; a failing compile leaves the temp file
            # behind for diagnosis.
            if this_dm.result == 0:
                cleanup_func()
def Build(
    force=False,
    output_stream=sys.stdout,
    verbose=False,
):
    """Regenerates the CodeCoverageFilter code via the SimpleSchemaGenerator script."""

    with StreamDecorator(output_stream).DoneManager(
        line_prefix="",
        prefix="\nResults: ",
        suffix="\n",
    ) as dm:
        command_line = '"{script}" Generate PythonYaml CodeCoverageFilter "{output_dir}" "/input={input_file}" /plugin_arg=no_serialization:True{force}{verbose}'.format(
            script=CurrentShell.CreateScriptName("SimpleSchemaGenerator"),
            input_file=os.path.join(_script_dir, "..", "CodeCoverageFilter.SimpleSchema"),
            output_dir=os.path.join(_script_dir, "..", "GeneratedCode"),
            force=" /force" if force else "",
            verbose=" /verbose" if verbose else "",
        )

        dm.result = Process.Execute(command_line, dm.stream)
        return dm.result
def Install(
    lib_name,
    pip_arg=None,
    output_stream=sys.stdout,
):
    """
    A replacement for pip install. Will ensure that already installed python
    libraries are not modified in-place, but rather considered as new libraries
    for the currently activated repository.

    Requires a clean SCM working tree: the installed files are detected, the
    working tree is reverted, conflicting library dirs are removed, and the
    install is run a second time so the new content lands cleanly.
    """

    pip_args = pip_arg; del pip_arg

    repo_root = os.getenv(RepositoryBootstrapConstants.DE_REPO_ROOT_NAME)

    scm = GetSCM(repo_root, raise_on_error=False)
    if not scm:
        output_stream.write("ERROR: No SCM is active for '{}'.\n".format(repo_root))
        return -1

    # A dirty working tree would make the post-install revert destructive.
    if scm.HasWorkingChanges(repo_root) or scm.HasUntrackedWorkingChanges(repo_root):
        output_stream.write("ERROR: Changes were detected in '{}'; please revert/shelve these changes and run this script again.\n".format(repo_root))
        return -1

    with StreamDecorator(output_stream).DoneManager(
        line_prefix='',
        prefix="\nResults: ",
        suffix='\n',
    ) as dm:
        pip_command_line = 'pip install "{}"{}'.format(
            lib_name,
            '' if not pip_args else " {}".format(' '.join(pip_args)),
        )

        dm.stream.write("\nDetecting libraries...")
        with dm.stream.DoneManager(
            suffix='\n',
        ) as this_dm:
            libraries = []

            # ----------------------------------------------------------------------
            def OnOutput(line):
                # Scrape pip's "Installing collected packages: a, b, c" line to
                # learn which libraries were installed; returning False stops
                # further line-delimited processing.
                this_dm.stream.write(line)

                if not line.startswith("Installing collected packages: "):
                    return True

                line = line[len("Installing collected packages: "):]

                for library in line.split(','):
                    library = library.strip()
                    if library:
                        libraries.append(library)

                return False

            # ----------------------------------------------------------------------

            this_dm.result = Process.Execute(
                pip_command_line,
                OnOutput,
                line_delimited_output=True,
            )

            # Stopping the output scan early can surface as a nonzero result;
            # treat a run that detected libraries as a success.
            if libraries:
                this_dm.result = 0

            if this_dm.result != 0:
                return this_dm.result

        if not libraries:
            # Nothing was installed (e.g. already satisfied); nothing to revert.
            return dm.result

        dm.stream.write("Reverting local changes...")
        with dm.stream.DoneManager(
            suffix='\n',
        ) as this_dm:
            this_dm.result = scm.Clean(repo_root, no_prompt=True)[0]

            if this_dm.result != 0:
                return this_dm.result

        dm.stream.write("Reverting existing libraries...")
        with dm.stream.DoneManager(
            suffix='\n',
        ) as this_dm:
            python_lib_dir = os.path.join(
                os.getenv(RepositoryBootstrapConstants.DE_REPO_GENERATED_NAME),
                PythonActivationActivity.Name,
                _EnvironmentSettings().LibraryDir,
            )
            assert os.path.isdir(python_lib_dir), python_lib_dir

            # Map of lowercase dir name -> True when the dir is a symlink into
            # the repo (an 'official' library) vs. a temporary local dir.
            library_items = {}

            for name in os.listdir(python_lib_dir):
                fullpath = os.path.join(python_lib_dir, name)
                if not os.path.isdir(fullpath):
                    continue

                library_items[name.lower()] = CurrentShell.IsSymLink(fullpath)

            # ----------------------------------------------------------------------
            def RemoveItem(name):
                # Symlinked libraries are unlinked (upgrade); temporary dirs are
                # removed entirely.
                name_lower = name.lower()

                if library_items[name_lower]:
                    this_dm.stream.write("Removing '{}' for upgrade.\n".format(name))
                    os.remove(os.path.join(python_lib_dir, name))
                else:
                    this_dm.stream.write("Removing temporary '{}'.\n".format(name))
                    FileSystem.RemoveTree(os.path.join(python_lib_dir, name))

                del library_items[name_lower]

            # ----------------------------------------------------------------------

            for library in libraries:
                potential_library_names = [
                    library,
                ]

                # Sometimes, a library's name will begin with a 'Py' but be saved in
                # the file system without the 'Py' prefix. Account for that scenario.
                if library.lower().startswith("py"):
                    potential_library_names.append(library[len("py"):])

                for potential_library_name in potential_library_names:
                    potential_library_name_lower = potential_library_name.lower()

                    if potential_library_name_lower not in library_items:
                        continue

                    RemoveItem(potential_library_name)

                    # Is there dist- or egg-info as well?
                    info_items = []

                    for item in six.iterkeys(library_items):
                        if (
                            item.startswith(potential_library_name_lower)
                            and (item.endswith(".dist-info") or item.endswith(".egg-info"))
                        ):
                            info_items.append(item)

                    for info_item in info_items:
                        RemoveItem(info_item)

                    break

        dm.stream.write("Installing...")
        with dm.stream.DoneManager() as this_dm:
            # Second install now lands in a clean tree
            this_dm.result = Process.Execute(pip_command_line, this_dm.stream)

        return dm.result
def Build(
    force=False,
    no_squash=False,
    keep_temporary_image=False,
    output_stream=sys.stdout,
    preserve_ansi_escape_sequences=False,
):
    """
    Builds Docker images for this repository: a 'base' image containing the
    filtered source, then (optionally) one 'activated' image per activation
    configuration with the environment baked in via ENV.

    Relies on enclosing-scope values (calling_dir, repository_name,
    repository_uri, base_docker_image, docker_image_name, image_username,
    image_groupname, maintainer, no_now_tag, no_activated_image, and the
    repository configuration lists) -- presumably bound by a factory that
    creates this function; confirm against the caller.
    """

    with StreamDecorator.GenerateAnsiSequenceStream(
        output_stream,
        preserve_ansi_escape_sequences=preserve_ansi_escape_sequences,
    ) as output_stream:
        with StreamDecorator(output_stream).DoneManager(
            line_prefix='',
            prefix="\nResults: ",
            suffix='\n',
        ) as dm:
            if not _VerifyDocker():
                dm.stream.write("ERROR: Ensure that docker is installed and available within this environment.\n")
                dm.result = -1
                return dm.result

            output_dir = os.path.join(calling_dir, "Generated")

            source_dir = os.path.join(output_dir, "Source")
            base_image_dir = os.path.join(output_dir, "Images", "Base")
            activated_image_dir = os.path.join(output_dir, "Images", "Activated")

            image_code_base = "/usr/lib/CommonEnvironmentImage"
            image_code_dir = "{}/{}".format(
                image_code_base,
                repository_name.replace('_', '/'),
            )

            # Date-based tag (YYYY.MM.DD) applied alongside 'latest' unless disabled
            if no_now_tag:
                now_tag = None
            else:
                now = time.localtime()
                now_tag = "{0}.{1:02d}.{2:02d}".format(now[0], now[1], now[2])

            # Create the base image
            dm.stream.write("Creating base image...")
            with dm.stream.DoneManager(suffix='\n') as base_dm:
                FileSystem.MakeDirs(base_image_dir)

                # Get the source
                scm = GetAnySCM(calling_dir)

                if not os.path.isdir(source_dir):
                    base_dm.stream.write("Cloning source...")
                    with base_dm.stream.DoneManager() as this_dm:
                        # Ensure that the parent dir exists, but don't create the dir iteself.
                        FileSystem.MakeDirs(os.path.dirname(source_dir))

                        # Enlist in the repo.
                        temp_dir = CurrentShell.CreateTempDirectory()
                        FileSystem.RemoveTree(temp_dir)

                        this_dm.result, output = scm.Clone(repository_uri, temp_dir)
                        if this_dm.result != 0:
                            this_dm.stream.write(output)
                            return this_dm.result

                        os.rename(temp_dir, source_dir)

                    has_changes = True
                else:
                    # The repo exists
                    base_dm.stream.write("Updating source...")
                    with base_dm.stream.DoneManager() as this_dm:
                        this_dm.result, output = scm.Pull(source_dir)
                        if this_dm.result != 0:
                            this_dm.stream.write(output)
                            return this_dm.result

                        has_changes = True

                        # Detect a no-op pull from the SCM's textual output
                        if scm.Name == "Mercurial":
                            if "no changes found" in output:
                                has_changes = False
                        elif scm.Name == "Git":
                            if "Already up-to-date" in output:
                                has_changes = False
                        else:
                            assert False, "Unsupported SCM: {}".format(scm.Name)

                        if has_changes:
                            this_dm.result, output = scm.Update(source_dir)
                            if this_dm.result != 0:
                                this_dm.stream.write(output)
                                return this_dm.result

                # Filter the source
                filtered_source_dir = os.path.join(base_image_dir, "FilteredSource")

                if os.path.isdir(filtered_source_dir) and not force and not has_changes:
                    base_dm.stream.write("No source changes were detected.\n")
                else:
                    with base_dm.stream.SingleLineDoneManager(
                        "Filtering source...",
                    ) as this_dm:
                        # Copy into a temp dir, then atomically swap into place
                        temp_dir = CurrentShell.CreateTempDirectory()
                        FileSystem.RemoveTree(temp_dir)

                        FileSystem.CopyTree(
                            source_dir,
                            temp_dir,
                            excludes=[
                                "/.git",
                                "/.gitignore",
                                "/.hg",
                                "/.hgignore",
                                "*/Generated",
                                "*/__pycache__",
                                "*/Windows",
                                "/*/src",
                                "*.cmd",
                                "*.ps1",
                                "*.pyc",
                                "*.pyo",
                            ],
                            optional_output_stream=this_dm.stream,
                        )

                        FileSystem.RemoveTree(filtered_source_dir)

                        os.rename(temp_dir, filtered_source_dir)

                base_dm.stream.write("Verifying Docker base image...")
                with base_dm.stream.DoneManager() as this_dm:
                    this_dm.result, output = Process.Execute('docker image history "{}"'.format(base_docker_image))
                    if this_dm.result != 0:
                        this_dm.stream.write(output)
                        return this_dm.result

                base_dm.stream.write("Creating dockerfile...")
                with base_dm.stream.DoneManager():
                    setup_statement = "./Setup.sh{}".format('' if not repository_setup_configurations else ' {}'.format(' '.join(['"/configuration={}"'.format(configuration) for configuration in repository_setup_configurations])))

                    if repository_name == "Common_Environment":
                        # Common_Environment can be set up directly within the image
                        commands = textwrap.dedent(
                            """\
                            RUN link /usr/bin/python3 /usr/bin/python

                            RUN adduser --disabled-password --disabled-login --gecos "" "{username}" \\
                             && addgroup "{groupname}" \\
                             && adduser "{username}" "{groupname}"

                            RUN cd {image_code_dir} \\
                             && {setup_statement}
                            """).format(
                                username=image_username,
                                groupname=image_groupname,
                                image_code_dir=image_code_dir,
                                setup_statement=setup_statement,
                            )
                    else:
                        # Other repos need an activated Common_Environment first;
                        # generate a setup script (LF line endings for the image).
                        import io

                        with io.open(
                            os.path.join(base_image_dir, "SetupEnvironmentImpl.sh"),
                            'w',
                            newline='\n',
                        ) as f:
                            f.write(textwrap.dedent(
                                """\
                                #!/bin/bash
                                . {image_code_base}/Common/Environment/Activate.sh python36
                                cd {image_code_dir}
                                {setup_statement}
                                rm --recursive {image_code_base}/Common/Environment/Generated/Linux/Default
                                """).format(
                                    image_code_base=image_code_base,
                                    image_code_dir=image_code_dir,
                                    setup_statement=setup_statement,
                                ))

                        commands = textwrap.dedent(
                            """\
                            COPY SetupEnvironmentImpl.sh /tmp/SetupEnvironmentImpl.sh

                            RUN chmod a+x /tmp/SetupEnvironmentImpl.sh \\
                             && /tmp/SetupEnvironmentImpl.sh
                            """)

                    with open(os.path.join(base_image_dir, "Dockerfile"), 'w') as f:
                        f.write(textwrap.dedent(
                            """\
                            FROM {base_image}

                            COPY FilteredSource {image_code_dir}

                            {commands}

                            RUN chown -R {username}:{groupname} {image_code_dir} \\
                             && chmod g-s {image_code_dir}/Generated/Linux \\
                             && chmod 0750 {image_code_dir}/Generated/Linux \\
                             && chmod -R o-rwx {image_code_dir}

                            # Cleanup
                            RUN apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*

                            LABEL maintainer="{maintainer}"

                            # By default, run a bash prompt as the source code user
                            WORKDIR {image_code_dir}
                            CMD [ "/sbin/my_init", "/sbin/setuser", "{username}", "bash" ]
                            """).format(
                                base_image=base_docker_image,
                                commands=commands,
                                username=image_username,
                                groupname=image_groupname,
                                image_code_dir=image_code_dir,
                                maintainer=maintainer,
                            ))

                base_dm.stream.write("Building Docker image...")
                with base_dm.stream.DoneManager() as this_dm:
                    tags = [
                        "base",
                        "base_latest",
                    ]

                    if now_tag:
                        tags.append("base_{}".format(now_tag))

                    command_line = 'docker build "{dir}" {tags}{squash}{force}' \
                        .format(
                            dir=base_image_dir,
                            tags=' '.join(['--tag "{}:{}"'.format(docker_image_name, tag) for tag in tags]),
                            squash='' if no_squash else " --squash",
                            force=" --no-cache" if force else '',
                        )

                    this_dm.result = Process.Execute(command_line, this_dm.stream)
                    if this_dm.result != 0:
                        return this_dm.result

            if not no_activated_image:
                # Create the activated image(s)
                dm.stream.write("Creating activated image(s)...")
                with dm.stream.DoneManager() as all_activated_dm:
                    for index, configuration in enumerate(repository_activation_configurations):
                        all_activated_dm.stream.write("Creating activated image{} ({} of {})...".format(
                            '' if not configuration else " for the configuration '{}'".format(configuration),
                            index + 1,
                            len(repository_activation_configurations),
                        ))
                        with all_activated_dm.stream.DoneManager(suffix='\n') as activated_dm:
                            this_activated_dir = os.path.join(activated_image_dir, configuration or "Default")
                            FileSystem.MakeDirs(this_activated_dir)

                            unique_id = str(uuid.uuid4())

                            temp_image_name = "{}_image".format(unique_id)
                            temp_container_name = "{}_container".format(unique_id)

                            # Activate the image so we can extract the changes
                            activated_dm.stream.write("Activating...")
                            with activated_dm.stream.DoneManager(suffix='\n') as this_dm:
                                command_line = 'docker run -it --name "{container_name}" "{image_name}:base_latest" /sbin/my_init -- /sbin/setuser "{username}" bash -c "cd {image_code_dir} && . ./Activate.sh {configuration} && pushd {image_code_base}/Common/Environment && python -m RepositoryBootstrap.EnvironmentDiffs After /decorate' \
                                    .format(
                                        container_name=temp_container_name,
                                        image_name=docker_image_name,
                                        configuration=configuration or '',
                                        username=image_username,
                                        image_code_dir=image_code_dir,
                                        image_code_base=image_code_base,
                                    )

                                sink = six.moves.StringIO()

                                this_dm.result = Process.Execute(command_line, StreamDecorator([
                                    sink,
                                    this_dm.stream,
                                ]))
                                if this_dm.result != 0:
                                    return this_dm.result

                                sink = sink.getvalue()

                            activated_dm.stream.write("Extracting enviroment diffs...")
                            with activated_dm.stream.DoneManager():
                                # EnvironmentDiffs decorates its JSON payload with
                                # sentinel lines; extract the content between them.
                                match = re.search(
                                    textwrap.dedent(
                                        """\
                                        //--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//
                                        (?P<content>.+?)
                                        //--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//
                                        """),
                                    sink,
                                    re.DOTALL | re.MULTILINE,
                                )
                                assert match, sink

                                environment_diffs = json.loads(match.group("content"))

                            # ----------------------------------------------------------------------
                            def RemoveTempContainer():
                                activated_dm.stream.write("Removing temp container...")
                                with activated_dm.stream.DoneManager() as this_dm:
                                    this_dm.result, output = Process.Execute('docker rm "{}"'.format(temp_container_name))
                                    if this_dm.result != 0:
                                        this_dm.stream.write(output)

                            # ----------------------------------------------------------------------

                            with CallOnExit(RemoveTempContainer):
                                # Commit the activated image
                                activated_dm.stream.write("Committing container...")
                                with activated_dm.stream.DoneManager() as this_dm:
                                    command_line = 'docker commit "{container_name}" "{image_name}"' \
                                        .format(
                                            container_name=temp_container_name,
                                            image_name=temp_image_name,
                                        )

                                    this_dm.result, output = Process.Execute(command_line)
                                    if this_dm.result != 0:
                                        this_dm.stream.write(output)
                                        return this_dm.result

                                # ----------------------------------------------------------------------
                                def RemoveTempImage():
                                    if keep_temporary_image:
                                        return

                                    activated_dm.stream.write("Removing temp image...")
                                    with activated_dm.stream.DoneManager() as this_dm:
                                        this_dm.result, output = Process.Execute('docker rmi "{}"'.format(temp_image_name))
                                        if this_dm.result != 0:
                                            this_dm.stream.write(output)

                                # ----------------------------------------------------------------------

                                with CallOnExit(RemoveTempImage):
                                    # Create a new dockerfile. The temp image has all the harddrive changes
                                    # made during activation, but doesn't have the environment changes.
                                    activated_dm.stream.write("Creating dockerfile...")
                                    with activated_dm.stream.DoneManager() as this_dm:
                                        with open(os.path.join(this_activated_dir, "Dockerfile"), 'w') as f:
                                            f.write(textwrap.dedent(
                                                """\
                                                FROM {temp_image_name}

                                                ENV {env}

                                                # By default, run a bash prompt as the source code user
                                                CMD [ "/sbin/my_init", "/sbin/setuser", "{username}", "bash" ]

                                                LABEL maintainer="{maintainer}"
                                                """).format(
                                                    temp_image_name=temp_image_name,
                                                    env='\\\n'.join(['  {}={} '.format(k, v) for k, v in six.iteritems(environment_diffs)]),
                                                    image_code_dir=image_code_dir,
                                                    maintainer=maintainer,
                                                    username=image_username,
                                                ))

                                    activated_dm.stream.write("Building Docker image...")
                                    with activated_dm.stream.DoneManager() as this_dm:
                                        tags = [
                                            "latest",
                                        ]

                                        if now_tag:
                                            tags.append(now_tag)

                                        # Multiple configurations get configuration-prefixed tags
                                        if len(repository_activation_configurations) > 1:
                                            tags = ["{}_{}".format(configuration, tag) for tag in tags]
                                            tags.insert(0, configuration)

                                        command_line = 'docker build "{dir}" {tags}{squash}{force}' \
                                            .format(
                                                dir=this_activated_dir,
                                                tags=' '.join(['--tag "{}:{}"'.format(docker_image_name, tag) for tag in tags]),
                                                squash='',  # <squash is not supported here> '' if no_squash else " --squash",
                                                force=" --no-cache" if force else '',
                                            )

                                        this_dm.result = Process.Execute(command_line, this_dm.stream)
                                        if this_dm.result != 0:
                                            return this_dm.result

            return dm.result
def Build(
    configuration,
    output_dir,
    release_build=False,
    prerelease_build_name=None,
    no_build_info=False,
    keep_temp_dir=False,
    cmake_generator=(
        None
        if os.getenv("DEVELOPMENT_ENVIRONMENT_REPOSITORY_CONFIGURATION") == "universal_linux"
        or os.getenv("DEVELOPMENT_ENVIRONMENT_CPP_USE_DEFAULT_CMAKE_GENERATOR")
        else "Ninja"
    ),
    output_stream=sys.stdout,
    verbose=False,
):
    """Builds the Featurizer Shared Library"""

    if release_build and prerelease_build_name:
        raise CommandLine.UsageException(
            "A prerelese build name cannot be provided with the 'release_build' flag",
        )

    with StreamDecorator(output_stream).DoneManager(
        line_prefix="",
        prefix="\nResults: ",
        suffix="\n",
    ) as dm:
        # Start from a clean output dir
        FileSystem.RemoveTree(output_dir)
        FileSystem.MakeDirs(output_dir)

        temp_directory = CurrentShell.CreateTempDirectory()

        # ----------------------------------------------------------------------
        def CleanupTempDir():
            # Keep the cmake working dir around for debugging when requested
            if keep_temp_dir:
                dm.stream.write(
                    "\nCMake output has been written to '{}'.\n".format(temp_directory),
                )
                return

            FileSystem.RemoveTree(temp_directory)

        # ----------------------------------------------------------------------

        with CallOnExit(CleanupTempDir):
            # cmake is invoked from within the temp dir; restore the cwd on exit
            prev_dir = os.getcwd()
            os.chdir(temp_directory)

            with CallOnExit(lambda: os.chdir(prev_dir)):
                if not release_build:
                    if prerelease_build_name is None:
                        # This value should compare as:
                        #   "manual" < "pipeline"
                        prerelease_build_name = "manual"

                    if not no_build_info:
                        now = datetime.datetime.now()

                        prerelease_build_name = "{prerelease_build_name}.{year}.{month}.{day}.{hour}.{minute}.{second}.{configuration}".format(
                            year=now.year,
                            month=now.month,
                            day=now.day,
                            hour=now.hour,
                            minute=now.minute,
                            second=now.second,
                            prerelease_build_name=prerelease_build_name,
                            configuration=configuration.lower(),
                        )

                # (activity label, shell command or callable) pairs, run in order
                activities = [
                    (
                        "Generating cmake Files",
                        'cmake {generator}-DCMAKE_BUILD_TYPE={configuration} {prerelease_build_name} "{this_dir}"'.format(
                            generator='-G "{}" '.format(
                                cmake_generator,
                            ) if cmake_generator else "",
                            # NOTE(review): 'temp_dir' does not appear in the format
                            # string above and is ignored by str.format -- looks
                            # vestigial; confirm before removing.
                            temp_dir=temp_directory,
                            configuration=configuration,
                            this_dir=_script_dir,
                            prerelease_build_name="" if not prerelease_build_name else "-DPRODUCT_VERSION_PRERELEASE_INFO={}".format(
                                prerelease_build_name,
                            ),
                        ),
                    ),
                    ("Building", "cmake --build ."),
                ]

                if (
                    os.getenv("DEVELOPMENT_ENVIRONMENT_REPOSITORY_CONFIGURATION")
                    == "universal_linux"
                ):
                    activities.append(
                        (
                            "Verifying Universal Linux Binaries",
                            "libcheck libFeaturizers.so",
                        ),
                    )

                activities += [
                    ("Copying Binaries", _CopyBinaries),
                    ("Copying Data", _CopyData),
                    ("Copying Headers", _CopyHeaders),
                ]

                for index, (activity, command_line) in enumerate(activities):
                    dm.stream.write(
                        "{} ({} of {})...".format(activity, index + 1, len(activities)),
                    )
                    with dm.stream.DoneManager(
                        suffix="\n" if verbose else None,
                    ) as this_dm:
                        # Capture output so it can be replayed on failure when
                        # not running verbosely
                        sink = six.moves.StringIO()

                        output_streams = [sink]

                        if verbose:
                            output_streams.append(
                                StreamDecorator(
                                    this_dm.stream,
                                    line_prefix="INFO: ",
                                ),
                            )

                        this_output_stream = StreamDecorator(output_streams)

                        # Callables are the in-process copy steps; strings are
                        # external commands
                        if callable(command_line):
                            this_dm.result = command_line(
                                temp_directory,
                                output_dir,
                                this_output_stream,
                            )
                        else:
                            this_dm.result = Process.Execute(
                                command_line,
                                this_output_stream,
                            )

                        if this_dm.result != 0:
                            if not verbose:
                                this_dm.stream.write(sink.getvalue())

                            return this_dm.result

        return dm.result
def CodeGeneratorFactory(
    plugin_map,
    name,
    description,
    filename_validation_expression,
    get_optional_metadata_func,        # def Func() -> [ (k, v), ... ]
    create_context_func,               # def Func(metadata, plugin) -> context
    invoke_func,                       # def Func(invoke_reason, context, status_stream, verbose_stream, verbose) -> result code
    is_supported_content_func=None,    # def Func(item) -> bool
    postprocess_context_func=None,     # def Func(context, plugin) -> context
    requires_output_name=True,
):
    """Returns a CodeGenerator object"""

    assert get_optional_metadata_func
    assert create_context_func
    assert invoke_func

    # Record the caller's module filename so it participates in change
    # detection for conditional re-invocation.
    calling_frame = inspect.stack()[1]
    calling_mod_filename = os.path.realpath(
        inspect.getmodule(calling_frame[0]).__file__)

    if CurrentShell.IsSymLink(calling_mod_filename):
        calling_mod_filename = CurrentShell.ResolveSymLink(
            calling_mod_filename)

    # ----------------------------------------------------------------------
    @staticderived
    class CodeGenerator(
        AtomicInputProcessingMixin,
        ConditionalInvocationQueryMixin,
        MultipleOutputMixin,
        CodeGeneratorMod.CodeGenerator,
    ):
        # ----------------------------------------------------------------------
        # |
        # |  Public Properties
        # |
        # ----------------------------------------------------------------------
        Name = DerivedProperty(name)
        Description = DerivedProperty(description)
        InputTypeInfo = DerivedProperty(
            FilenameTypeInfo(
                validation_expression=filename_validation_expression))

        OriginalModuleFilename = calling_mod_filename
        RequiresOutputName = requires_output_name

        # ----------------------------------------------------------------------
        # |
        # |  Public Methods
        # |
        # ----------------------------------------------------------------------
        @staticmethod
        @override
        def IsSupportedContent(filename):
            # Default to accepting everything when no predicate was supplied
            return is_supported_content_func is None or is_supported_content_func(
                filename)

        # ----------------------------------------------------------------------
        # |
        # |  Protected Methods
        # |
        # ----------------------------------------------------------------------
        @classmethod
        @override
        def _GetOptionalMetadata(cls):
            return get_optional_metadata_func() + \
                [
                    ( "plugin_settings", {} ),
                ] + \
                super(CodeGenerator, cls)._GetOptionalMetadata()

        # ----------------------------------------------------------------------
        @classmethod
        @override
        def _GetRequiredMetadataNames(cls):
            names = [
                "plugin_name",
            ]

            if cls.RequiresOutputName:
                names += [
                    "output_name",
                ]

            names += super(CodeGenerator, cls)._GetRequiredMetadataNames()

            return names

        # ----------------------------------------------------------------------
        @classmethod
        @override
        def _CreateContext(cls, metadata, status_stream):
            if metadata["plugin_name"] not in plugin_map:
                raise CommandLine.UsageException(
                    "'{}' is not a valid plugin".format(
                        metadata["plugin_name"]))

            plugin = plugin_map[metadata["plugin_name"]].Plugin

            # Ensure that all plugin settings are present and that they
            # are the expected type.
            custom_settings = OrderedDict([
                (k, v) for k, v in plugin.GenerateCustomSettingsAndDefaults()
            ])

            plugin_settings = metadata["plugin_settings"]

            for k, v in six.iteritems(plugin_settings):
                if k not in custom_settings:
                    raise CommandLine.UsageException(
                        "'{}' is not a valid plugin setting".format(k))

                desired_type = type(custom_settings[k])

                if type(v) != desired_type:
                    # NOTE(review): `UnicodeDecodeError` in this isinstance check
                    # looks like it was intended to be a text type (e.g.
                    # six.text_type / unicode) -- confirm against history.
                    assert isinstance(v, (str, UnicodeDecodeError)), (v, type(v))
                    plugin_settings[k] = StringSerialization.DeserializeItem(
                        CreateFromPythonType(desired_type), v)

            # Fill in defaults for settings the caller didn't supply
            for k, v in six.iteritems(custom_settings):
                if k not in plugin_settings:
                    plugin_settings[k] = v

            metadata["plugin_settings"] = plugin.PreprocessMetadata(
                plugin_settings)

            # Invoke custom functionality
            context = create_context_func(metadata, plugin)
            context = plugin.PreprocessContext(context)

            context["output_filenames"] = [
                os.path.join(context["output_dir"], filename)
                for filename in plugin.GenerateOutputFilenames(context)
            ]

            context = plugin.PostprocessContext(context)

            if postprocess_context_func:
                context = postprocess_context_func(context, plugin)

            return super(CodeGenerator, cls)._CreateContext(context, status_stream)

        # ----------------------------------------------------------------------
        @classmethod
        @override
        def _InvokeImpl(
            cls,
            invoke_reason,
            context,
            status_stream,
            verbose_stream,
            verbose,
        ):
            # Delegate to the caller-supplied function, adding the plugin
            return invoke_func(
                cls,
                invoke_reason,
                context,
                status_stream,
                verbose_stream,
                verbose,
                plugin_map[context["plugin_name"]].Plugin,
            )

        # ----------------------------------------------------------------------
        @classmethod
        @override
        def _GetAdditionalGeneratorItems(cls, context):
            # ----------------------------------------------------------------------
            def ProcessorGeneratorItem(item):
                # Map plugin names back to their Plugin objects
                if isinstance(item, six.string_types) and item in plugin_map:
                    return plugin_map[item].Plugin

                return item

            # ----------------------------------------------------------------------

            plugin = plugin_map[context["plugin_name"]].Plugin

            return [
                cls,
                cls.OriginalModuleFilename,
                plugin,
            ] + \
                list(plugin.GetAdditionalGeneratorItems(context)) + \
                super(CodeGenerator, cls)._GetAdditionalGeneratorItems(context)

    # ----------------------------------------------------------------------

    return CodeGenerator
def Execute(
    cls,
    compiler,
    context,
    command_line,
    includes=None,
    excludes=None,
    verbose=False,
):
    """
    Runs a python test under coverage measurement.

    Include/exclude filters come from (in priority order): the explicit
    `includes`/`excludes` arguments, `# code_coverage: include|exclude|disable`
    comments in the test source, or a default derived from the file under test.
    A `disable` directive falls back to a plain (uncovered) test run.

    Returns cls.ExecuteResult(test_result, test_output, test_time,
    coverage_result, coverage_output, coverage_time, coverage_data_filename,
    percentage, percentages).
    """

    assert command_line

    includes = includes or []
    excludes = excludes or []

    # Get the name of the script to execute
    if command_line.lower().startswith("python"):
        filename = command_line[len("python"):].replace('"', '').strip()
        assert os.path.isfile(filename), filename
    else:
        filename = command_line

    # Attempt to extract include and exclude information from the source
    disable_code_coverage = False

    # BUGFIX: the original condition tested `not includes` twice; the scan
    # should be skipped when either explicit includes or excludes were given,
    # matching the parallel condition below.
    if not disable_code_coverage and not includes and not excludes:
        regex = re.compile(
            textwrap.dedent(
                r"""(?#
                Header          )^.*?#\s*(?#
                Label           )code_coverage\s*:\s*(?#
                Action          )(?P<action>\S+)(?#
                +Optional       )(?:(?#
                  Assignment    )\s*=\s*(?#
                  +Quote        )(?P<quote>")?(?#
                  Name          )(?P<name>.+?)(?#
                  -Quote        )(?P=quote)?(?#
                -Optional       ))?(?#
                Suffix          )\s*$(?#
                )"""))

        # Read the source once (previously the file handle was never closed)
        with open(filename) as source_file:
            source_lines = source_file.readlines()

        for index, line in enumerate(source_lines):
            match = regex.match(line)
            if not match:
                continue

            action = match.group("action").lower()

            if action == "disable":
                disable_code_coverage = True

            elif action in ["include", "exclude"]:
                # Referenced filenames are relative to the test file
                referenced_filename = match.group("name")
                referenced_filename = os.path.abspath(
                    os.path.join(os.path.dirname(filename), referenced_filename))

                if not os.path.isfile(referenced_filename):
                    raise Exception(
                        "'{}', referenced on line {}, is not a valid filename"
                        .format(referenced_filename, index + 1))

                if action == "include":
                    includes.append(referenced_filename)
                elif action == "exclude":
                    excludes.append(referenced_filename)
                else:
                    assert False, action

            else:
                raise Exception(
                    "'{}' is not a supported action".format(action))

    if disable_code_coverage:
        # Run the test without coverage instrumentation
        return StandardTestExecutor.Execute(
            compiler,
            context,
            'python "{}"'.format(filename),
        )

    # Attempt to determine include and exclude information based on the original filename
    if not includes and not excludes:
        sut_filename = compiler.TestToItemName(filename)

        # Build the module's dotted path by walking up through packages
        # (dirs containing __init__.py).
        dirname, basename = os.path.split(sut_filename)

        stack = [
            basename,
        ]

        while True:
            potential_filename = os.path.join(dirname, "__init__.py")
            if not os.path.isfile(potential_filename):
                break

            potential_dirname, basename = os.path.split(dirname)

            stack.append(basename)

            # Guard against reaching the filesystem root
            if potential_dirname == dirname:
                break

            dirname = potential_dirname

        stack.reverse()

        includes.append("*/{}".format('/'.join(stack)))

    # Run the process and calculate code coverage. A tiny trampoline script is
    # used so 'coverage' runs with this interpreter regardless of PATH.
    temp_filename = CurrentShell.CreateTempFilename(".py")
    with open(temp_filename, 'w') as f:
        f.write(
            textwrap.dedent(
                """\
                from coverage.cmdline import main
                main()
                """))

    with CallOnExit(lambda: FileSystem.RemoveFile(temp_filename)):
        command_line_template = 'python "{}" "{{}}"'.format(temp_filename)

        # Run the test under 'coverage run'
        start_time = time.time()

        command_line = '{} {} {} {}'.format(
            command_line_template.format("run"),
            '"--include={}"'.format(','.join(includes)) if includes else '',
            '"--omit={}"'.format(','.join(excludes)) if excludes else '',
            filename,
        )

        test_result, test_output = Process.Execute(command_line)
        test_time = str(
            datetime.timedelta(seconds=(time.time() - start_time)))

        # Extract the coverage data as XML
        start_time = time.time()

        xml_temp_filename = CurrentShell.CreateTempFilename(".xml")

        command_line = '{} -o "{}"'.format(
            command_line_template.format("xml"),
            xml_temp_filename,
        )

        coverage_result, coverage_output = Process.Execute(command_line)
        coverage_time = str(
            datetime.timedelta(seconds=(time.time() - start_time)))

        coverage_data_filename = xml_temp_filename

        # Treat a missing data file as a failure even if 'coverage xml' returned 0
        if not os.path.isfile(coverage_data_filename):
            if coverage_result == 0:
                coverage_result = -1

            coverage_data_filename = None

        # Parse the percentage info
        if coverage_result != 0:
            percentage = None
            percentages = None
        else:
            with open(coverage_data_filename) as coverage_file:
                root = ET.fromstring(coverage_file.read())

            percentage = float(root.attrib["line-rate"]) * 100

            percentages = OrderedDict()

            for package in root.findall("packages/package"):
                for class_ in package.findall("classes/class"):
                    percentages[class_.attrib["filename"]] = float(
                        class_.attrib["line-rate"]) * 100

    return cls.ExecuteResult(
        test_result,
        test_output,
        test_time,
        coverage_result,
        coverage_output,
        coverage_time,
        coverage_data_filename,
        percentage,
        percentages,
    )
def _InvokeImpl(
    cls,
    invoke_reason,
    context,
    status_stream,
    verbose_stream,
    verbose,
):
    """Runs pylint over the item referenced by ``context["input"]`` and converts
    the reported score into a pass/fail result.

    Args:
        invoke_reason: Unused here; part of the common verifier interface.
        context: Dict with at least "input", "passing_score", and
            "explicit_passing_score" keys.
        status_stream: Receives the captured lint output on failure (unless
            verbose already streamed it).
        verbose_stream: Receives lint output as it is produced.
        verbose: When True, output has already been streamed; don't repeat it.

    Returns:
        0 on success, -1 when the score could not be parsed or is below the
        passing score.
    """
    # If the file is being invoked as a test file, measure the file under test
    # rather than the test itself.
    filename = context["input"]

    try:
        filename = cls.TestToItemName(filename)
    except Exception:
        # Not a test file; lint the input as-is. (Narrowed from a bare
        # `except:` so KeyboardInterrupt/SystemExit are not swallowed.)
        pass

    assert os.path.isfile(filename), filename

    # Empty __init__.py files are package markers with nothing to lint.
    if os.path.basename(filename) == "__init__.py" and os.path.getsize(filename) == 0:
        return 0

    # Create the lint file
    configuration_file = os.getenv(cls.CONFIGURATION_ENVIRONMENT_VAR_NAME) or os.path.join(
        _script_dir, "PythonVerifier.default_configuration"
    )
    assert os.path.isfile(configuration_file), configuration_file

    # Write the python script that invokes the linter. The doubled braces
    # survive `.format` and become pylint's own msg-template placeholders.
    temp_filename = CurrentShell.CreateTempFilename(".py")

    with open(temp_filename, 'w') as f:
        f.write(
            textwrap.dedent(
                """\
                import sys

                from pylint import lint

                lint.Run([
                    r"--rcfile={config}",
                    r"--msg-template={{path}}({{line}}): [{{msg_id}}] {{msg}}",
                    r"{filename}",
                ])
                """
            ).format(
                config=configuration_file,
                # Fix: the template previously contained a literal placeholder
                # instead of "{filename}", so this argument was ignored and
                # pylint was not pointed at the file under test.
                filename=filename,
            )
        )

    with CallOnExit(lambda: FileSystem.RemoveFile(temp_filename)):
        # Run the generated file
        command_line = 'python "{}"'.format(temp_filename)

        sink = six.moves.StringIO()
        output_stream = StreamDecorator([
            sink,
            verbose_stream,
        ])

        regex_sink = six.moves.StringIO()

        Process.Execute(command_line, StreamDecorator([
            regex_sink,
            output_stream,
        ]))

        regex_sink = regex_sink.getvalue()

        result = 0

        # Extract the results
        match = re.search(
            r"Your code has been rated at (?P<score>[-\d\.]+)/(?P<max>[\d\.]+)",
            regex_sink,
            re.MULTILINE,
        )

        if not match:
            result = -1
        else:
            score = float(match.group("score"))
            max_score = float(match.group("max"))
            assert max_score != 0.0

            # Don't measure scores for files in Impl directories
            # NOTE(review): this tests the basename (extension included)
            # rather than any directory component, so "FooImpl.py" does NOT
            # match; confirm the intended semantics against the comment above.
            is_impl_file = os.path.basename(filename).endswith("Impl")

            if is_impl_file and not context["explicit_passing_score"]:
                passing_score = None
            else:
                passing_score = context["passing_score"]

            output_stream.write(
                textwrap.dedent(
                    """\
                    Score:          {score} (out of {max_score})
                    Passing Score:  {passing_score}{explicit}

                    """
                ).format(
                    score=score,
                    max_score=max_score,
                    passing_score=passing_score,
                    explicit=" (explicitly provided)" if context["explicit_passing_score"] else '',
                )
            )

            if passing_score is not None and score < passing_score:
                result = -1

        # Replay the captured output on failure if it wasn't already streamed.
        if result != 0 and not verbose:
            status_stream.write(sink.getvalue())

        return result
def ExtractCoverageInfo(
    self,
    coverage_filename,
    binary_filename,
    includes,
    excludes,
    output_stream,
):
    """Extracts (covered, not_covered) line counts for ``binary_filename``.

    Copies the binary's .gcno/.gcda files into a temp dir, converts them via
    the ExtractCoverageInfo script, then tallies per-method coverage from the
    resulting lcov.info, honoring the include/exclude filters.

    Returns a non-zero int on conversion failure, otherwise the tuple
    ``(covered, not_covered)``.
    """
    # This is a hack. The names extracted from the coverage files are mangled
    # while the names provided in includes and excludes are in the glob format.
    # Split the glob and then determine matches by checking to see if each
    # component is in the mangled name. There is a lot that could go wrong with
    # this, but hopefully it is good enough.

    # ----------------------------------------------------------------------
    def _split_filter(pattern):
        # Drop glob wildcards; keep only the meaningful components.
        return [component for component in pattern.split("::") if component != "*"]

    # ----------------------------------------------------------------------
    def _matches(mangled_name, components):
        return all(component in mangled_name for component in components)

    # ----------------------------------------------------------------------
    if excludes:
        exclude_components = [_split_filter(exclude) for exclude in excludes]

        def _is_excluded(method_name):
            return any(_matches(method_name, components) for components in exclude_components)

    else:

        def _is_excluded(method_name):
            return False

    if includes:
        include_components = [_split_filter(include) for include in includes]

        def _is_included(method_name):
            return any(_matches(method_name, components) for components in include_components)

    else:

        def _is_included(method_name):
            return True

    # ----------------------------------------------------------------------
    def _should_include(method_name):
        return _is_included(method_name) and not _is_excluded(method_name)

    # ----------------------------------------------------------------------

    # grcov will parse every file in the directory which isn't what we want
    # here. Move the coverage files for this binary to a temp dir, parse that
    # dir, and then remove it.
    temp_directory = CurrentShell.CreateTempDirectory()

    with CallOnExit(lambda: FileSystem.RemoveTree(temp_directory)):
        # ----------------------------------------------------------------------
        def _find_coverage_file(ext):
            # Look next to the binary for a file with the given extension whose
            # basename starts with the binary's basename.
            dirname, basename = os.path.split(binary_filename)
            basename = os.path.splitext(basename)[0]

            for candidate in os.listdir(dirname):
                fullpath = os.path.join(dirname, candidate)
                if not os.path.isfile(fullpath):
                    continue

                candidate_basename, candidate_ext = os.path.splitext(candidate)
                if candidate_ext == ext and candidate_basename.startswith(basename):
                    return fullpath

            return None

        # ----------------------------------------------------------------------

        # Stage both coverage artifacts in the temp dir.
        for extension in (".gcno", ".gcda"):
            staged_filename = _find_coverage_file(extension)
            assert staged_filename and os.path.isfile(staged_filename), (binary_filename, staged_filename)

            shutil.copyfile(
                staged_filename,
                os.path.join(temp_directory, os.path.basename(staged_filename)),
            )

        # Convert the content
        result = Process.Execute(
            '{} Lcov "/bin_dir={}" /type=ade'.format(
                CurrentShell.CreateScriptName("ExtractCoverageInfo"),
                temp_directory,
            ),
            output_stream,
        )
        if result != 0:
            return result

        # Note that the coverage files for all output was generated when
        # coverage was stopped. These coverage files are used to extract
        # coverage percentages for display purposes. Don't let the output name
        # of the file fool you - these files are different from the globally
        # generated coverage file.
        coverage_filename = os.path.join(temp_directory, "lcov.info")
        assert os.path.isfile(coverage_filename), coverage_filename

        # Parse the file: each line is a JSON record; only well-formed
        # "method" records that pass the filters are counted.
        covered = 0
        not_covered = 0

        with open(coverage_filename) as f:
            for line in f:
                record = json.loads(line)

                if "method" not in record:
                    continue

                record = record["method"]

                if (
                    "name" not in record
                    or "total_covered" not in record
                    or "total_uncovered" not in record
                ):
                    continue

                if not _should_include(record["name"]):
                    continue

                covered += record["total_covered"]
                not_covered += record["total_uncovered"]

        return covered, not_covered
def Build(
    force=False,
    output_stream=sys.stdout,
    verbose=False,
):
    """Builds components used for Integration tests"""
    with StreamDecorator(output_stream).DoneManager(
        line_prefix="",
        prefix="\nResults: ",
        suffix="\n",
    ) as dm:
        # Doubled braces survive this first .format and are filled in per
        # plugin/schema below.
        command_line_template = '{script} Generate {{plugin}} {{schema}} "{{output_dir}}" /input="{{input_filename}}" /output_data_filename_prefix={{plugin}} /filter_unsupported_attributes{force}{verbose}'.format(
            script=CurrentShell.CreateScriptName("SimpleSchemaGenerator"),
            force=" /force" if force else "",
            verbose=" /verbose" if verbose else "",
        )

        # (schema filename, explicit plugin list or [] for all, extra flags or None)
        schema_names = [
            ("AllTypes.SimpleSchema", [], " /include=types"),
            ("DefaultValues.SimpleSchema", [], None),
            ("DictionaryTest.SimpleSchema", ["PythonJson", "PythonYaml"], None),
            ("FileSystemTest.SimpleSchema", [], None),
            ("ProcessAdditionalData.SimpleSchema", [], None),
            ("Test.SimpleSchema", [], None),
        ]

        all_plugin_names = [
            "PythonJson",
            "PythonXml",
            "PythonYaml",
            "JsonSchema",
            "XsdSchema",
        ]

        total_schemas = len(schema_names)

        for schema_ordinal, (schema_name, plugin_names, schema_flags) in enumerate(schema_names, 1):
            plugin_names = plugin_names or all_plugin_names
            schema_flags = schema_flags or ""

            dm.stream.write("Processing '{}' ({} of {})...".format(schema_name, schema_ordinal, total_schemas))
            with dm.stream.DoneManager(suffix="\n") as schema_dm:
                schema_basename = os.path.splitext(schema_name)[0]
                output_dir = os.path.join(_script_dir, "Generated", schema_basename)

                schema_name = os.path.join(_script_dir, "..", "Impl", schema_name)
                assert os.path.isfile(schema_name), schema_name

                total_plugins = len(plugin_names)

                for plugin_ordinal, plugin_name in enumerate(plugin_names, 1):
                    schema_dm.stream.write("Plugin '{}' ({} of {})...".format(plugin_name, plugin_ordinal, total_plugins))
                    with schema_dm.stream.DoneManager(
                        suffix="\n" if verbose else None,
                    ) as plugin_dm:
                        command_line = command_line_template.format(
                            plugin=plugin_name,
                            schema=schema_basename,
                            output_dir=output_dir,
                            input_filename=schema_name,
                        )

                        # Only the schema-producing plugins receive the extra flags.
                        if plugin_name.endswith("Schema"):
                            command_line += schema_flags

                        plugin_dm.result, output = Process.Execute(command_line)

                        if plugin_dm.result != 0 or verbose:
                            plugin_dm.stream.write(output)

        return dm.result
def Build(output_stream=sys.stdout):
    """Bootstraps b2 (if necessary) and then builds the configured boost libs.

    Relies on module-level state: ``is_standard_configuration``, ``boost_root``,
    and ``boost_libs``. Returns the DoneManager result (0 on success).
    """
    with StreamDecorator(output_stream).DoneManager(
        line_prefix="",
        prefix="\nResults: ",
        suffix="\n",
    ) as dm:
        if is_standard_configuration:
            dm.stream.write(
                "This build is not active with the 'standard' configuration.\n",
            )
            return dm.result

        # Build b2 (if necessary)
        dm.stream.write("Checking for 'b2'...")
        with dm.stream.DoneManager(suffix="\n") as this_dm:
            b2_filename = os.path.join(
                boost_root,
                CurrentShell.CreateExecutableName("b2"),
            )

            if not os.path.isfile(b2_filename):
                this_dm.stream.write("Building 'b2'...")
                with this_dm.stream.DoneManager() as build_dm:
                    prev_dir = os.getcwd()
                    os.chdir(boost_root)

                    with CallOnExit(lambda: os.chdir(prev_dir)):
                        if CurrentShell.CategoryName == "Windows":
                            bootstrap_name = "bootstrap.bat"
                            command_line = bootstrap_name
                        else:
                            bootstrap_name = "bootstrap.sh"

                            # Manually set the toolset.
                            # Fix: os.getenv returns None when the var is unset,
                            # which previously crashed on .lower(); default to ""
                            # so the "not a recognized compiler" path fires instead.
                            compiler_name = (
                                os.getenv("DEVELOPMENT_ENVIRONMENT_CPP_COMPILER_NAME") or ""
                            ).lower()

                            if "clang" in compiler_name:
                                toolset = "clang"
                            else:
                                build_dm.stream.write(
                                    "ERROR: '{}' is not a recognized compiler.\n".format(compiler_name),
                                )
                                build_dm.result = -1
                                return build_dm.result

                            command_line = "./{} --with-toolset={}".format(bootstrap_name, toolset)

                            # POSIX only: the bootstrap scripts must be executable.
                            # NOTE(review): nesting reconstructed from mangled
                            # source — confirm this loop belongs to the non-Windows
                            # branch.
                            for filename in [
                                bootstrap_name,
                                os.path.join("tools", "build", "bootstrap.sh"),
                                os.path.join("tools", "build", "src", "engine", "build.sh"),
                            ]:
                                assert os.path.isfile(filename), filename
                                CurrentShell.MakeFileExecutable(filename)

                        build_dm.result, output = Process.Execute(command_line)
                        if build_dm.result != 0:
                            build_dm.stream.write(output)
                            return build_dm.result

        # Build boost (if necessary)
        dm.stream.write("Building boost...")
        with dm.stream.DoneManager() as build_dm:
            prev_dir = os.getcwd()
            os.chdir(boost_root)

            architecture = os.getenv("DEVELOPMENT_ENVIRONMENT_CPP_ARCHITECTURE")

            with CallOnExit(lambda: os.chdir(prev_dir)):
                command_line = "b2 --build-type=complete --layout=versioned --build-dir=build/{architecture} --hash stage address-model={architecture} {libs}".format(
                    architecture="64" if architecture == "x64" else "32",
                    libs=" ".join(
                        ["--with-{}".format(lib_name) for lib_name in boost_libs],
                    ),
                )

                if CurrentShell.CategoryName != "Windows":
                    # TODO: Enable ASLR
                    # command_line = './{} variant=release cxxflags="-fPIC -fpie" linkflags="-pie"'.format(command_line)
                    command_line = "./{} ".format(command_line)

                build_dm.result = Process.Execute(command_line, build_dm.stream)
                if build_dm.result != 0:
                    return build_dm.result

        return dm.result